gitabtion/SoftMaskedBert-PyTorch

On the meaning of cor_labels and det_labels.

kovnew opened this issue · 0 comments

Hello, I'd like to ask: what do cor_labels and det_labels each represent?

from transformers import BertForMaskedLM

# CscTrainingModel is defined elsewhere in this repo
class BertForCsc(CscTrainingModel):
    def __init__(self, cfg, tokenizer):
        super().__init__(cfg)
        self.cfg = cfg
        self.bert = BertForMaskedLM.from_pretrained(cfg.MODEL.BERT_CKPT)
        self.tokenizer = tokenizer

    def forward(self, texts, cor_labels=None, det_labels=None):
        if cor_labels is not None:
            # tokenize the correction labels into label ids (used as MLM targets)
            text_labels = self.tokenizer(cor_labels, padding=True, return_tensors='pt')['input_ids']
            text_labels = text_labels.to(self.cfg.MODEL.DEVICE)
            print('text labels: ', text_labels)
            # Tokens with indices set to -100 are ignored (masked) by the MLM loss,
            # so padded positions (token id 0) do not contribute to it
            text_labels[text_labels == 0] = -100
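
For context, here is a minimal sketch of what the two arguments typically carry in a Soft-Masked BERT style CSC setup; the sentences, tensors, and token ids below are made-up illustrations, not values from this repo:

import torch

# Hypothetical example (sentences, tensors and token ids are illustrative,
# not taken from this repo).
# cor_labels: the fully corrected target sentences, given as strings; forward()
#             tokenizes them into the MLM targets for the correction network.
# det_labels: per-token 0/1 flags marking which input positions contain an error,
#             used by the detection network.
texts      = ['我今天很高心']                    # input sentence with one wrong character
cor_labels = ['我今天很高兴']                    # same sentence with the error corrected
det_labels = torch.tensor([[0, 0, 0, 0, 0, 0, 1, 0]])  # 1 at the erroneous position
                                                        # ([CLS]/[SEP] assumed to be 0 here)

# The -100 step from the snippet above: padded label positions (token id 0, i.e. [PAD])
# are set to -100 so that BertForMaskedLM's cross-entropy loss ignores them.
text_labels = torch.tensor([[101, 200, 201, 202, 102, 0, 0]])   # toy padded label ids
text_labels[text_labels == 0] = -100
print(text_labels)   # padding positions are now -100 and excluded from the loss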