
Commit

Merge pull request #711 from Dominic789654/update_lisa_code
Update lisa code
research4pan authored Mar 27, 2024
2 parents 3bb0fb1 + a9d4e10 commit a50288d
Showing 1 changed file with 2 additions and 3 deletions.
5 changes: 2 additions & 3 deletions src/lmflow/pipeline/finetuner.py
@@ -311,8 +311,7 @@ def __init__(self, n_layers, interval_steps, model):
        self.layers_attribute = 'model.transformer.h'  # General access path
        self.total_layers = len(eval('self.' + self.layers_attribute))  # Dynamically execute to get the number of layers

        # Freeze all layers upon initialization
        self.freeze_all_layers()
        self.switch_active_layers()
        self.active_layers_indices = []

    def freeze_all_layers(self):
@@ -323,7 +322,7 @@ def freeze_all_layers(self):

    def on_step_begin(self, args, state, control, **kwargs):
        # Check if it's time to switch active layers, including at step 0
        if state.global_step % self.interval_steps == 0 or state.global_step == 1:
        if state.global_step % self.interval_steps == 0:
            self.switch_active_layers()

    def switch_active_layers(self):
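
The body of switch_active_layers() is collapsed in this diff view. For orientation, below is a minimal sketch of what a LISA-style layer switcher with this interface typically does: freeze every transformer layer, sample a new set of n_layers layer indices, and re-enable gradients only for those layers. The numpy-based sampling and the exact statements are assumptions for illustration, not the code from finetuner.py itself.

    def switch_active_layers(self):
        # Illustrative sketch only; assumes numpy is imported as np at module level.
        # Start from a fully frozen model, then re-enable a random subset of layers.
        self.freeze_all_layers()
        layers = eval('self.' + self.layers_attribute)  # same dynamic access pattern as in __init__

        # Sample n_layers distinct layer indices to train until the next switch.
        self.active_layers_indices = np.random.choice(
            range(self.total_layers), self.n_layers, replace=False
        )

        # Turn gradients back on only for the sampled layers.
        for idx in self.active_layers_indices:
            for param in layers[idx].parameters():
                param.requires_grad = True

With the modulo check in on_step_begin, the switch also fires at the very first training step, since global_step starts at 0 and 0 % interval_steps == 0 (this is what the "including at step 0" comment refers to). In the Hugging Face Trainer, a TrainerCallback like this is registered with trainer.add_callback(...) before training starts.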
