
Commit

added comments to main test file
jshuadvd committed Jun 12, 2024
1 parent 985e6e6 commit 37ba704
Showing 1 changed file with 6 additions and 0 deletions.
tests/test_main.py: 6 additions, 0 deletions
@@ -84,6 +84,7 @@ def test_longrope_model_forward():
assert not torch.equal(input_ids, output)


+# Testing the extend_context function
def test_extend_context():
model = LongRoPEModel(
d_model=512, n_heads=8, num_layers=6, vocab_size=50257, max_len=65536
@@ -103,6 +104,7 @@ def test_extend_context():
assert extended_model.max_len == 2048000


+# Testing the recover_short_context function
def test_recover_short_context():
model = LongRoPEModel(
d_model=512, n_heads=8, num_layers=6, vocab_size=50257, max_len=65536
@@ -117,6 +119,7 @@ def test_recover_short_context():
assert recovered_model.max_len == 65536


+# Testing the progressive_extension function
def test_progressive_extension():
model = LongRoPEModel(
d_model=512, n_heads=8, num_layers=6, vocab_size=50257, max_len=65536
@@ -146,6 +149,7 @@ def test_progressive_extension():
assert extended_model.max_len == 2048000


+# Testing the short_context_recovery function
def test_short_context_recovery():
model = LongRoPEModel(
d_model=512, n_heads=8, num_layers=6, vocab_size=50257, max_len=65536
@@ -154,6 +158,7 @@ def test_short_context_recovery():
short_context_recovery(model, tokenizer)


+# Testing the longrope_model_forward_with_extended_context function
def test_longrope_model_forward_with_extended_context():
model = LongRoPEModel(
d_model=512, n_heads=8, num_layers=6, vocab_size=50257, max_len=2048000
@@ -164,6 +169,7 @@ def test_longrope_model_forward_with_extended_context():
assert not torch.equal(input_ids, output)


+# Testing the longrope_model_forward_with_short_context function
def test_longrope_model_forward_with_short_context():
model = LongRoPEModel(
d_model=512, n_heads=8, num_layers=6, vocab_size=50257, max_len=4096
