From 2d8d32d8c94c7202093971cca04198e67c0d923a Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Tue, 27 Jan 2026 10:12:59 +0000
Subject: [PATCH 1/2] Initial plan

From 7ccc661bed8ff1da5cc609290be31aa35852a9c2 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Tue, 27 Jan 2026 10:16:07 +0000
Subject: [PATCH 2/2] Initialize label_attention_matrix to None before
 text_embedder branch

Co-authored-by: meilame-tayebjee <114609737+meilame-tayebjee@users.noreply.github.com>
---
 torchTextClassifiers/model/model.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/torchTextClassifiers/model/model.py b/torchTextClassifiers/model/model.py
index d253a66..5e4cc66 100644
--- a/torchTextClassifiers/model/model.py
+++ b/torchTextClassifiers/model/model.py
@@ -134,6 +134,7 @@ def forward(
             Raw, not softmaxed.
         """
         encoded_text = input_ids  # clearer name
+        label_attention_matrix = None
         if self.text_embedder is None:
             x_text = encoded_text.float()
         else:
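
A minimal sketch of why the one-line initialization matters. The rest of the
method body is not visible in the hunk, so the shape of the else branch and
the later read of label_attention_matrix below are assumptions, not the
library's actual code:

    import torch

    def forward_sketch(text_embedder, input_ids):
        encoded_text = input_ids  # clearer name
        label_attention_matrix = None  # the patched line: bound on every path
        if text_embedder is None:
            x_text = encoded_text.float()
        else:
            # Assumption: only this branch binds label_attention_matrix in the
            # real model. Without the initialization above, the
            # text_embedder-is-None path would raise UnboundLocalError the
            # moment the name is read later in forward().
            x_text, label_attention_matrix = text_embedder(encoded_text)
        return x_text, label_attention_matrix

    # The previously failing path: no text embedder configured.
    logits, attn = forward_sketch(None, torch.randint(0, 10, (2, 5)))
    assert attn is None  # returns cleanly instead of raising

Initializing the name before the branch is the idiomatic Python fix here:
it guarantees the variable is bound on every control-flow path, and None is
an explicit, checkable signal that no attention matrix was produced.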