emanuelaboros committed on
Commit
1d52a4b
·
1 Parent(s): a8e1b6b
Files changed (3) hide show
  1. LID-40-3-2000000-1-4.bin +3 -0
  2. config.json +2 -1
  3. modeling_stacked.py +3 -1
LID-40-3-2000000-1-4.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:987a2e16b216eb22f0342beb75874e9748cf6bceeb4ac75f6e2efc3414e74961
3
+ size 32001553
config.json CHANGED
@@ -1,8 +1,9 @@
1
  {
2
- "_name_or_path": "experiments_final/model_dbmdz_bert_medium_historic_multilingual_cased_max_sequence_length_512_epochs_5_run_extended_suffix_baseline/checkpoint-450",
3
  "architectures": [
4
  "ExtendedMultitaskModelForTokenClassification"
5
  ],
 
6
  "attention_probs_dropout_prob": 0.1,
7
  "auto_map": {
8
  "AutoConfig": "configuration_stacked.ImpressoConfig",
 
1
  {
2
+ "_name_or_path": "emanuelaboros/ner-stacked-bert-multilingual",
3
  "architectures": [
4
  "ExtendedMultitaskModelForTokenClassification"
5
  ],
6
+ "filename": "LID-40-3-2000000-1-4.bin",
7
  "attention_probs_dropout_prob": 0.1,
8
  "auto_map": {
9
  "AutoConfig": "configuration_stacked.ImpressoConfig",
modeling_stacked.py CHANGED
@@ -5,7 +5,7 @@ from transformers import PreTrainedModel, AutoModel, AutoConfig, BertConfig
5
  from torch.nn import CrossEntropyLoss
6
  from typing import Optional, Tuple, Union
7
  import logging, json, os
8
-
9
  from .configuration_stacked import ImpressoConfig
10
 
11
  logger = logging.getLogger(__name__)
@@ -29,6 +29,8 @@ class ExtendedMultitaskModelForTokenClassification(PreTrainedModel):
29
  self.bert = AutoModel.from_pretrained(
30
  config.pretrained_config["_name_or_path"], config=config.pretrained_config
31
  )
 
 
32
  if "classifier_dropout" not in config.__dict__:
33
  classifier_dropout = 0.1
34
  else:
 
5
  from torch.nn import CrossEntropyLoss
6
  from typing import Optional, Tuple, Union
7
  import logging, json, os
8
+ import floret
9
  from .configuration_stacked import ImpressoConfig
10
 
11
  logger = logging.getLogger(__name__)
 
29
  self.bert = AutoModel.from_pretrained(
30
  config.pretrained_config["_name_or_path"], config=config.pretrained_config
31
  )
32
+ self.model_floret = floret.load_model(self.config.filename)
33
+ print(f"Model loaded: {self.model_floret}")
34
  if "classifier_dropout" not in config.__dict__:
35
  classifier_dropout = 0.1
36
  else: