updated tests to match new inference and tf supported model

This commit is contained in:
Falko Victor Habel 2025-04-19 22:53:35 +02:00
parent ac3fabd55f
commit 391a03baed
2 changed files with 11 additions and 39 deletions

View File

@@ -21,9 +21,8 @@ def real_model(tmp_path):
base_model = AIIABase(config)
# Make sure aiuNN is properly configured with all required attributes
- upsampler = aiuNN(base_model, config=ai_config)
- # Ensure the upsample attribute is properly set if needed
- # upsampler.upsample = ... # Add any necessary initialization
+ upsampler = aiuNN(config=ai_config)
+ upsampler.load_base_model(base_model)
# Save the model and config to temporary directory
save_path = str(model_dir / "save")
@@ -40,10 +39,10 @@ def real_model(tmp_path):
json.dump(config_data, f)
# Save model
- upsampler.save(save_path)
+ upsampler.save_pretrained(save_path)
# Load model in inference mode
- inference_model = aiuNNInference(model_path=save_path, precision='fp16', device='cpu')
+ inference_model = aiuNNInference(model_path=save_path, device='cpu')
return inference_model
@@ -88,12 +87,3 @@ def test_convert_to_binary(inference):
result = inference.convert_to_binary(test_image)
assert isinstance(result, bytes)
assert len(result) > 0
- def test_process_batch(inference):
-     # Create test images
-     test_array = np.zeros((100, 100, 3), dtype=np.uint8)
-     test_images = [Image.fromarray(test_array) for _ in range(2)]
-     results = inference.process_batch(test_images)
-     assert len(results) == 2
-     assert all(isinstance(img, Image.Image) for img in results)
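For reference, a minimal sketch of the fixture flow the updated inference test now expects. The class and method names come straight from the diff; the import paths and the helper name build_inference_model are assumptions for illustration only.

# Sketch only: import paths below are assumptions, not taken from the repo.
from pathlib import Path
from aiia import AIIABase, AIIAConfig                  # assumed module layout
from aiunn import aiuNN, aiuNNConfig, aiuNNInference   # assumed module layout

def build_inference_model(model_dir: Path) -> aiuNNInference:
    config = AIIAConfig()
    ai_config = aiuNNConfig()
    base_model = AIIABase(config)

    # New pattern: construct from the config alone, then attach the base model.
    upsampler = aiuNN(config=ai_config)
    upsampler.load_base_model(base_model)

    # save() is replaced by the pretrained-style save_pretrained().
    save_path = str(model_dir / "save")
    upsampler.save_pretrained(save_path)

    # The inference wrapper no longer takes a precision argument.
    return aiuNNInference(model_path=save_path, device='cpu')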

View File

@@ -10,39 +10,21 @@ def test_save_and_load_model():
config = AIIAConfig()
ai_config = aiuNNConfig()
base_model = AIIABase(config)
- upsampler = aiuNN(base_model, config=ai_config)
+ upsampler = aiuNN(config=ai_config)
+ upsampler.load_base_model(base_model)
# Save the model
save_path = os.path.join(tmpdirname, "model")
- upsampler.save(save_path)
+ upsampler.save_pretrained(save_path)
# Load the model
- loaded_upsampler = aiuNN.load(save_path)
+ loaded_upsampler = aiuNN.from_pretrained(save_path)
# Verify that the loaded model is the same as the original model
assert isinstance(loaded_upsampler, aiuNN)
- assert loaded_upsampler.config.__dict__ == upsampler.config.__dict__
+ assert loaded_upsampler.config.hidden_size == upsampler.config.hidden_size
+ assert loaded_upsampler.config._activation_function == upsampler.config._activation_function
+ assert loaded_upsampler.config.architectures == upsampler.config.architectures
- def test_save_and_load_model_with_precision():
-     # Create a temporary directory to save the model
-     with tempfile.TemporaryDirectory() as tmpdirname:
-         # Create configurations and build a base model
-         config = AIIAConfig()
-         ai_config = aiuNNConfig()
-         base_model = AIIABase(config)
-         upsampler = aiuNN(base_model, config=ai_config)
-         # Save the model
-         save_path = os.path.join(tmpdirname, "model")
-         upsampler.save(save_path)
-         # Load the model with precision 'bf16'
-         loaded_upsampler = aiuNN.load(save_path, precision="bf16")
-         # Verify that the loaded model is the same as the original model
-         assert isinstance(loaded_upsampler, aiuNN)
-         assert loaded_upsampler.config.__dict__ == upsampler.config.__dict__
if __name__ == "__main__":
test_save_and_load_model()
- test_save_and_load_model_with_precision()
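With the precision test removed, this file now exercises only the pretrained-style round trip. A compact sketch of that flow, under the same import-path assumptions as the sketch above:

# Sketch only: mirrors the updated test; import paths are assumptions.
import os
import tempfile
from aiia import AIIABase, AIIAConfig   # assumed module layout
from aiunn import aiuNN, aiuNNConfig    # assumed module layout

def save_and_load_roundtrip():
    with tempfile.TemporaryDirectory() as tmpdirname:
        upsampler = aiuNN(config=aiuNNConfig())
        upsampler.load_base_model(AIIABase(AIIAConfig()))

        save_path = os.path.join(tmpdirname, "model")
        upsampler.save_pretrained(save_path)       # was: upsampler.save(save_path)

        loaded = aiuNN.from_pretrained(save_path)  # was: aiuNN.load(save_path)

        # Strict __dict__ equality is replaced by attribute-level checks.
        assert isinstance(loaded, aiuNN)
        assert loaded.config.hidden_size == upsampler.config.hidden_size
        assert loaded.config._activation_function == upsampler.config._activation_function
        assert loaded.config.architectures == upsampler.config.architectures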