WebashalarForML committed on
Commit 83f32c2
1 Parent(s): 9513736

Update utils/model.py

Files changed (1)
  1. utils/model.py +46 -35
utils/model.py CHANGED
@@ -5,34 +5,32 @@ from pathlib import Path
 from spacy.tokens import DocBin
 import random
 import shutil
+import os

-# Load the training data from the .spacy file
 def load_data_from_spacy_file(file_path):
-    # Initialize a blank English model to ensure compatibility
+    """Load training data from .spacy file."""
     nlp = spacy.blank("en")

-    # Load the DocBin object and get documents
     try:
         doc_bin = DocBin().from_disk(file_path)
         docs = list(doc_bin.get_docs(nlp.vocab))
+        print(f"Loaded {len(docs)} documents from {file_path}.")
         return docs
     except Exception as e:
         print(f"Error loading data from .spacy file: {e}")
         return []

-
-# Train model function
 def train_model(epochs, model_path):
-    # Initialize a blank English model
+    """Train NER model."""
     nlp = spacy.blank("en")

-    # Create an NER component and add it to the pipeline
+    # Add the NER pipeline
     if "ner" not in nlp.pipe_names:
         ner = nlp.add_pipe("ner")
-
-    nlp.add_pipe("sentencizer")
+
+    nlp.add_pipe("sentencizer")  # Optional component to split sentences

-    # Define all possible entity labels
+    # Define entity labels
     labels = [
         "PERSON", "CONTACT", "EMAIL", "ABOUT", "EXPERIENCE", "YEARS_EXPERIENCE",
         "UNIVERSITY", "SOFT_SKILL", "INSTITUTE", "LAST_QUALIFICATION_YEAR", "JOB_TITLE",
@@ -40,55 +38,68 @@ def train_model(epochs, model_path):
         "LANGUAGE", "LOCATION", "PROJECTS", "SKILL", "CERTIFICATE"
     ]

-    # Add labels to the NER component
+    # Add the labels to the NER pipeline
     for label in labels:
         ner.add_label(label)

-    # Load the training data
+    # Load training data
     train_data = load_data_from_spacy_file("./data/Spacy_data.spacy")

-    # Start the training
-    optimizer = nlp.begin_training()
+    # Verify if data was loaded correctly
+    if not train_data:
+        print("No training data found. Exiting training.")
+        return

+    optimizer = nlp.begin_training()
     epoch_losses = []
     best_loss = float('inf')

-    # Training loop
+    # Start training loop
     for epoch in range(epochs):
         losses = {}
-        random.shuffle(train_data)  # Shuffle data for better training
-
-        # Create minibatches
+        random.shuffle(train_data)  # Shuffle data
+
+        # Create batches
         batches = minibatch(train_data, size=compounding(4.0, 32.0, 1.001))
-
+
         for batch in batches:
-            texts, annotations = zip(*[(doc.text, {"entities": [(ent.start_char, ent.end_char, ent.label_) for ent in doc.ents]}) for doc in batch])
-
-            # Convert to Example objects
-            examples = [Example.from_dict(nlp.make_doc(text), annotation) for text, annotation in zip(texts, annotations)]
-
+            # Extract texts and annotations
+            try:
+                texts, annotations = zip(
+                    *[(doc.text, {"entities": [(ent.start_char, ent.end_char, ent.label_) for ent in doc.ents]})
+                      for doc in batch]
+                )
+            except ValueError as e:
+                print(f"Error processing batch: {e}")
+                continue
+
+            # Create Example objects
+            examples = [Example.from_dict(nlp.make_doc(text), annotation)
+                        for text, annotation in zip(texts, annotations)]
+
             # Update the model
             nlp.update(examples, sgd=optimizer, drop=0.35, losses=losses)
-
+
+        # Record loss for this epoch
         current_loss = losses.get("ner", float('inf'))
         epoch_losses.append(current_loss)
-
-        print(f"Losses at epoch {epoch + 1}: {losses}")
-
+
+        print(f"Losses at epoch {epoch + 1}: {losses}")
+
         # Save the best model
         if current_loss < best_loss:
             best_loss = current_loss
-            # Save to a temporary path
             temp_model_path = model_path + "_temp"
             nlp.to_disk(temp_model_path)

-            # Use shutil to move the model to the final path
+            # Safely move to the final path
             if os.path.exists(model_path):
-                shutil.rmtree(model_path)  # Remove the old model if it exists
-            shutil.copytree(temp_model_path, model_path)  # Copy the temp model to the final path
-            shutil.rmtree(temp_model_path)  # Remove the temporary model directory
-
-    # Final save after training
+                shutil.rmtree(model_path)
+            shutil.copytree(temp_model_path, model_path)
+            shutil.rmtree(temp_model_path)
+
+    # Save the final model
     nlp.to_disk(model_path)
+    print(f"Training completed. Final model saved at: {model_path}")

     return epoch_losses
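For reference, the ./data/Spacy_data.spacy file that load_data_from_spacy_file reads is a serialized spaCy DocBin. The sketch below shows one way such a file could be produced; the sample text and entity offsets are hypothetical and only illustrate the annotation format the loader expects:

import spacy
from spacy.tokens import DocBin

nlp = spacy.blank("en")
doc_bin = DocBin()

# Hypothetical training example: mark "Python" as a SKILL entity.
text = "Experienced in Python and spaCy."
doc = nlp.make_doc(text)
span = doc.char_span(15, 21, label="SKILL")  # characters 15-21 cover "Python"
doc.ents = [span]
doc_bin.add(doc)

# Serialize to the path train_model reads from.
doc_bin.to_disk("./data/Spacy_data.spacy")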
 
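To exercise the updated trainer end to end, a minimal sketch follows; it assumes utils/model.py is importable from the project root, and the epoch count, model path, and sample sentence are placeholders:

from utils.model import train_model

# Train and collect per-epoch NER losses.
epoch_losses = train_model(epochs=20, model_path="./models/resume_ner")
print(epoch_losses)

# Load the saved model back and run inference on a sample text.
import spacy
nlp = spacy.load("./models/resume_ner")
doc = nlp("Jane Doe has five years of experience with Python and SQL.")
print([(ent.text, ent.label_) for ent in doc.ents])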