Skip to content

Commit

Permalink
final finetune work before break
Browse files Browse the repository at this point in the history
  • Loading branch information
SoufianeNoubir committed Oct 3, 2024
1 parent 87c5075 commit be6b3c2
Showing 1 changed file with 25 additions and 3 deletions.
28 changes: 25 additions & 3 deletions gbmi/exp_indhead/finetunebound.py
Original file line number Diff line number Diff line change
Expand Up @@ -574,7 +574,7 @@ def total_bound(b, i_1, i_2, dic):

# %%
# NOTE(review): this span is a scraped GitHub diff hunk (header `@@ -574,7 +574,7 @@`
# above) with the -/+ markers stripped, so BOTH the removed and the added argument
# line appear below — as written this is not valid Python (duplicate positional/
# keyword arguments). Per the hunk header the commit changes only the learning rate.
optimiser = torch.optim.AdamW(
model_1.parameters(), lr=31e-4, betas=(0.9, 0.999), weight_decay=1.0  # presumably the REMOVED (pre-commit) line — TODO confirm against the repo
model_1.parameters(), lr=5e-3, betas=(0.9, 0.999), weight_decay=1.0  # presumably the ADDED (post-commit) line
)

counter = 0
Expand Down Expand Up @@ -628,10 +628,32 @@ def total_bound(b, i_1, i_2, dic):


# %%
# NOTE(review): scraped diff hunk (`@@ -628,10 +628,32 @@` header above). The
# -/+ markers AND all indentation were lost in extraction, so removed and added
# lines are interleaved and the loop body's indentation is flattened — this span
# is not runnable as-is and must be reconciled against the actual commit.
counter = 0
# Re-create the optimiser with a much larger learning rate (5e-1) for this phase.
optimiser = torch.optim.AdamW(
model_1.parameters(), lr=5e-1, betas=(0.9, 0.999), weight_decay=1.0
)

# `loss_bound(model_1, 3, 8)` returns an indexable result; element [4] is used
# here — its meaning is not visible in this view (TODO: check loss_bound's return).
a = loss_bound(model_1, 3, 8)[4]
# Loss is 1 minus the mean over the nonzero entries of `a`.
loss = 1 - a[a != 0].mean()
for i in range(10):  # presumably the REMOVED (pre-commit) loop header — TODO confirm
print(1 - loss)  # presumably REMOVED — paired with the range(10) loop above
for i in range(1):  # presumably the ADDED replacement loop header
print(a[a != 0].mean())
loss.backward()
optimiser.step()
optimiser.zero_grad()
# NOTE(review): extra `[5]` index here vs the `[4]`-only form before the loop —
# verify this asymmetry is intentional and not a typo in the commit.
a = loss_bound(model_1, 3, 8)[4][5]
loss = 1 - a[a != 0].mean()
counter += 1
print(counter)


optimiser = torch.optim.AdamW(
model_1.parameters(), lr=5e-3, betas=(0.9, 0.999), weight_decay=1.0
)

a = loss_bound(model_1, 3, 8)[4]
loss = 1 - a[a != 0].mean()
for i in range(30):
print(a[a != 0].mean())
loss.backward()
optimiser.step()
optimiser.zero_grad()
Expand Down

0 comments on commit be6b3c2

Please sign in to comment.