diff --git a/tests/unittest/test_get_max_tokens.py b/tests/unittest/test_get_max_tokens.py
new file mode 100644
index 00000000..fe2f6685
--- /dev/null
+++ b/tests/unittest/test_get_max_tokens.py
@@ -0,0 +1,67 @@
+import pytest
+from pr_agent.algo.utils import get_max_tokens, MAX_TOKENS
+import pr_agent.algo.utils as utils
+
+class TestGetMaxTokens:
+
+    # Test a model that is registered in MAX_TOKENS
+    def test_model_max_tokens(self, monkeypatch):
+        fake_settings = type('', (), {
+            'config': type('', (), {
+                'custom_model_max_tokens': 0,
+                'max_model_tokens': 0
+            })()
+        })()
+
+        monkeypatch.setattr(utils, "get_settings", lambda: fake_settings)
+
+        model = "gpt-3.5-turbo"
+        expected = MAX_TOKENS[model]
+
+        assert get_max_tokens(model) == expected
+
+    # Test a model that is not registered in MAX_TOKENS but is configured as a custom model
+    def test_model_has_custom(self, monkeypatch):
+        fake_settings = type('', (), {
+            'config': type('', (), {
+                'custom_model_max_tokens': 5000,
+                'max_model_tokens': 0  # no limit
+            })()
+        })()
+
+        monkeypatch.setattr(utils, "get_settings", lambda: fake_settings)
+
+        model = "custom-model"
+        expected = 5000
+
+        assert get_max_tokens(model) == expected
+
+    def test_model_not_max_tokens_and_not_has_custom(self, monkeypatch):
+        fake_settings = type('', (), {
+            'config': type('', (), {
+                'custom_model_max_tokens': 0,
+                'max_model_tokens': 0
+            })()
+        })()
+
+        monkeypatch.setattr(utils, "get_settings", lambda: fake_settings)
+
+        model = "custom-model"
+
+        with pytest.raises(Exception):
+            get_max_tokens(model)
+
+    def test_model_max_tokens_with_limit(self, monkeypatch):
+        fake_settings = type('', (), {
+            'config': type('', (), {
+                'custom_model_max_tokens': 0,
+                'max_model_tokens': 10000
+            })()
+        })()
+
+        monkeypatch.setattr(utils, "get_settings", lambda: fake_settings)
+
+        model = "gpt-3.5-turbo"  # this model's registered MAX_TOKENS value exceeds the limit below
+        expected = 10000
+
+        assert get_max_tokens(model) == expected
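
For reference, the four test cases correspond to the following behavior of `get_max_tokens`: a model registered in `MAX_TOKENS` returns its registered value, an unregistered model falls back to `custom_model_max_tokens` or raises, and a non-zero `max_model_tokens` caps whatever value was found. The snippet below is a minimal sketch reconstructed from what the tests assert, not the actual implementation in `pr_agent/algo/utils.py`; only `get_settings` and `MAX_TOKENS` are real names from that module.

```python
# Sketch of the behavior the tests above assume for get_max_tokens
# (reconstructed from the test expectations, not copied from the real source).
def get_max_tokens_sketch(model: str) -> int:
    settings = get_settings()
    if model in MAX_TOKENS:
        # Registered model: start from its registered limit
        max_tokens_model = MAX_TOKENS[model]
    elif settings.config.custom_model_max_tokens > 0:
        # Unregistered model with a configured custom limit
        max_tokens_model = settings.config.custom_model_max_tokens
    else:
        # Unregistered model and no custom limit: nothing sensible to return
        raise ValueError(f"No max-token limit configured for model '{model}'")
    if settings.config.max_model_tokens:
        # A non-zero max_model_tokens caps the result (the fourth test case)
        max_tokens_model = min(settings.config.max_model_tokens, max_tokens_model)
    return max_tokens_model
```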