#!/usr/bin/env python3
|
|
"""
|
|
Comprehensive test suite for CAE Mesh Generator
|
|
"""
|
|
import os
|
|
import sys
|
|
import unittest
|
|
import tempfile
|
|
import shutil
|
|
import json
|
|
import time
|
|
from pathlib import Path
|
|
from datetime import datetime
|
|
|
|
# Add project root to path
|
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
|
|
|
|
from backend.models.data_models import UploadedFile, ProcessingStatus, MeshResult
|
|
from backend.utils.file_validator import validate_step_file, get_file_info
|
|
from backend.utils.state_manager import state_manager
|
|
from backend.utils.error_handler import (
|
|
FileUploadError, ANSYSError, MeshGenerationError, ValidationError,
|
|
validate_file_upload, error_reporter
|
|
)
|
|
from backend.utils.resource_manager import resource_manager, file_manager
|
|
from backend.pymechanical.session_manager import ANSYSSessionManager
|
|
|
|
class TestFileUploadAndProcessing(unittest.TestCase):
    """Test file upload and processing functionality"""

    def setUp(self):
        """Create a scratch directory holding one valid and one invalid file."""
        self.test_dir = tempfile.mkdtemp()
        self.test_files = []

        # Minimal but well-formed STEP content (header + data sections) used
        # as the "valid" fixture for validation tests.
        self.sample_step_content = """ISO-10303-21;
HEADER;
FILE_DESCRIPTION(('Test STEP file for CAE Mesh Generator'),'2;1');
FILE_NAME('test_blade.step','2025-01-01T00:00:00',('Test'),('Test'),'','','');
FILE_SCHEMA(('AUTOMOTIVE_DESIGN'));
ENDSEC;
DATA;
#1 = CARTESIAN_POINT('Origin',(0.0,0.0,0.0));
#2 = DIRECTION('X-Axis',(1.0,0.0,0.0));
#3 = DIRECTION('Y-Axis',(0.0,1.0,0.0));
#4 = DIRECTION('Z-Axis',(0.0,0.0,1.0));
#5 = AXIS2_PLACEMENT_3D('Coordinate System',#1,#4,#2);
ENDSEC;
END-ISO-10303-21;"""

        self.valid_step_file = os.path.join(self.test_dir, 'test_blade.step')
        Path(self.valid_step_file).write_text(self.sample_step_content)

        # A plain-text file that must be rejected by STEP validation.
        self.invalid_file = os.path.join(self.test_dir, 'invalid.txt')
        Path(self.invalid_file).write_text('This is not a STEP file')

    def tearDown(self):
        """Remove scratch files and reset shared state between tests."""
        shutil.rmtree(self.test_dir, ignore_errors=True)
        state_manager.clear_current_file()
        state_manager.clear_session_data()

    def _make_uploaded_file(self):
        """Build an UploadedFile record pointing at the sample STEP file."""
        return UploadedFile(
            id='test-123',
            filename='test_blade.step',
            file_path=self.valid_step_file,
            upload_time=datetime.now(),
            status='UPLOADED',
        )

    def test_file_validation(self):
        """Test file validation functionality"""
        # The well-formed STEP fixture passes validation.
        ok, err = validate_step_file(self.valid_step_file)
        self.assertTrue(ok, f"Valid STEP file should pass validation: {err}")

        # A non-STEP file fails.
        ok, err = validate_step_file(self.invalid_file)
        self.assertFalse(ok, "Invalid file should fail validation")

        # A missing path fails.
        ok, err = validate_step_file('non_existent.step')
        self.assertFalse(ok, "Non-existent file should fail validation")

    def test_file_info_extraction(self):
        """Test file information extraction"""
        info = get_file_info(self.valid_step_file)

        self.assertIsInstance(info, dict)
        # Size and type are always reported; the sample file is non-empty.
        self.assertIn('file_size', info)
        self.assertIn('file_type', info)
        self.assertGreater(info['file_size'], 0)

    def test_uploaded_file_model(self):
        """Test UploadedFile data model"""
        # to_dict() must round-trip the identifying fields unchanged.
        file_dict = self._make_uploaded_file().to_dict()

        self.assertIsInstance(file_dict, dict)
        self.assertEqual(file_dict['id'], 'test-123')
        self.assertEqual(file_dict['filename'], 'test_blade.step')
        self.assertEqual(file_dict['status'], 'UPLOADED')

    def test_state_manager(self):
        """Test state management functionality"""
        # No file registered yet, so nothing is ready for processing.
        self.assertIsNone(state_manager.get_current_file())
        self.assertFalse(state_manager.is_ready_for_processing())

        # Registering a file makes the manager ready.
        state_manager.set_current_file(self._make_uploaded_file())
        self.assertIsNotNone(state_manager.get_current_file())
        self.assertTrue(state_manager.is_ready_for_processing())

        # Starting processing flips the status to PROCESSING...
        state_manager.start_processing("Test processing")
        self.assertTrue(state_manager.is_processing())
        self.assertEqual(state_manager.get_processing_status().status, 'PROCESSING')

        # ...and completing it flips the status to COMPLETED.
        state_manager.complete_processing("Test completed")
        self.assertFalse(state_manager.is_processing())
        self.assertEqual(state_manager.get_processing_status().status, 'COMPLETED')
|
|
|
|
class TestErrorHandling(unittest.TestCase):
    """Test error handling functionality"""

    def test_file_upload_validation(self):
        """Test file upload validation"""

        class MockFile:
            """Minimal stand-in for an upload object (filename + length)."""

            def __init__(self, filename='', content_length=None):
                self.filename = filename
                self.content_length = content_length

        # None, an empty filename, and a non-STEP extension are all rejected.
        for bad_upload in (None, MockFile(''), MockFile('test.txt')):
            with self.assertRaises(FileUploadError):
                validate_file_upload(bad_upload)

        # A .step filename is acceptable and must not raise.
        try:
            validate_file_upload(MockFile('test.step'))
        except FileUploadError:
            self.fail("Valid STEP file should not raise FileUploadError")

    def test_error_reporter(self):
        """Test error reporting functionality"""
        error_reporter.clear()

        # One error and one warning should both be tracked.
        error_reporter.add_error("Test error", "TEST_ERROR", {"detail": "test"})
        self.assertTrue(error_reporter.has_errors())
        error_reporter.add_warning("Test warning", {"detail": "test"})
        self.assertTrue(error_reporter.has_warnings())

        # The generated report reflects exactly what was added above.
        report = error_reporter.get_report()
        self.assertEqual(report['error_count'], 1)
        self.assertEqual(report['warning_count'], 1)

        # Clearing resets both collections.
        error_reporter.clear()
        self.assertFalse(error_reporter.has_errors())
        self.assertFalse(error_reporter.has_warnings())
|
|
|
|
class TestResourceManagement(unittest.TestCase):
    """Test resource management functionality"""

    def setUp(self):
        """Create an isolated scratch directory for each test."""
        self.test_dir = tempfile.mkdtemp()

    def tearDown(self):
        """Remove the scratch directory, tolerating already-deleted paths."""
        shutil.rmtree(self.test_dir, ignore_errors=True)

    def test_temp_file_management(self):
        """Test temporary file management"""
        temp_file = file_manager.create_temp_file('.step', 'test_', self.test_dir)

        # The managed file exists and carries the requested suffix.
        self.assertTrue(os.path.exists(temp_file))
        self.assertTrue(temp_file.endswith('.step'))

        # Registered temp files can be cleaned up without error.
        resource_manager.register_temp_file(temp_file)
        outcome = resource_manager.cleanup_temp_files()
        self.assertGreaterEqual(outcome['cleaned'], 0)

    def test_temp_directory_management(self):
        """Test temporary directory management"""
        temp_dir = file_manager.create_temp_directory('test_', self.test_dir)

        # The managed path exists and is a directory.
        self.assertTrue(os.path.exists(temp_dir))
        self.assertTrue(os.path.isdir(temp_dir))

        # Registered temp directories can be cleaned up without error.
        resource_manager.register_temp_directory(temp_dir)
        outcome = resource_manager.cleanup_temp_directories()
        self.assertGreaterEqual(outcome['cleaned'], 0)

    def test_resource_status(self):
        """Test resource status reporting"""
        status = resource_manager.get_resource_status()

        self.assertIsInstance(status, dict)
        # The status report exposes a count for each tracked resource kind.
        for key in ('temp_files_count', 'temp_directories_count',
                    'ansys_sessions_count'):
            self.assertIn(key, status)
|
|
|
|
class TestANSYSIntegration(unittest.TestCase):
    """Test ANSYS integration functionality"""

    def test_session_manager_simulation_mode(self):
        """Test ANSYS session manager in simulation mode"""
        manager = ANSYSSessionManager(simulation_mode=True)

        # Starting a simulated session succeeds and flips the active flag.
        self.assertTrue(manager.start_session(),
                        "Simulation session should start successfully")
        self.assertTrue(manager.is_session_active)

        # Session info reflects both the active and simulation flags.
        info = manager.get_session_info()
        self.assertIsInstance(info, dict)
        self.assertTrue(info['is_active'])
        self.assertTrue(info['simulation_mode'])

        # Closing tears the session down again.
        self.assertTrue(manager.close_session(),
                        "Session should close successfully")
        self.assertFalse(manager.is_session_active)

    def test_context_manager(self):
        """Test ANSYS session manager as context manager"""
        with ANSYSSessionManager(simulation_mode=True) as session:
            # Inside the with-block the session is live.
            self.assertTrue(session.is_session_active)

        # Leaving the context must close the session.
        self.assertFalse(session.is_session_active)
|
|
|
|
class TestMeshQuality(unittest.TestCase):
    """Test mesh quality checking functionality"""

    def test_mesh_result_model(self):
        """Test MeshResult data model"""
        fields = {
            'element_count': 10000,
            'node_count': 15000,
            'quality_score': 85.5,
            'quality_status': 'GOOD',
            'generation_time': 120.5,
        }
        result_dict = MeshResult(**fields).to_dict()

        self.assertIsInstance(result_dict, dict)
        # to_dict() must round-trip every field we care about unchanged.
        for key in ('element_count', 'node_count',
                    'quality_score', 'quality_status'):
            self.assertEqual(result_dict[key], fields[key])

    def test_quality_validation(self):
        """Test mesh quality validation"""
        # A high-scoring mesh is labelled GOOD.
        good = MeshResult(
            element_count=10000,
            node_count=15000,
            quality_score=85.5,
            quality_status='GOOD',
            generation_time=120.5,
        )
        self.assertGreater(good.quality_score, 50)
        self.assertEqual(good.quality_status, 'GOOD')

        # A low-scoring mesh is labelled POOR.
        poor = MeshResult(
            element_count=5000,
            node_count=7500,
            quality_score=25.0,
            quality_status='POOR',
            generation_time=60.0,
        )
        self.assertLess(poor.quality_score, 50)
        self.assertEqual(poor.quality_status, 'POOR')
|
|
|
|
class TestIntegrationWorkflow(unittest.TestCase):
    """Test complete integration workflow.

    Walks the full upload -> validate -> process -> mesh -> complete pipeline
    in simulation mode, mutating the module-global state_manager in the same
    order the real application would. Statement order matters throughout.
    """

    def setUp(self):
        """Set up test environment: scratch dir plus a minimal STEP fixture."""
        self.test_dir = tempfile.mkdtemp()

        # Create a sample STEP file (minimal header + one data entity).
        self.sample_step_content = """ISO-10303-21;
HEADER;
FILE_DESCRIPTION(('Test STEP file'),'2;1');
FILE_NAME('integration_test.step','2025-01-01T00:00:00',('Test'),('Test'),'','','');
FILE_SCHEMA(('AUTOMOTIVE_DESIGN'));
ENDSEC;
DATA;
#1 = CARTESIAN_POINT('Origin',(0.0,0.0,0.0));
ENDSEC;
END-ISO-10303-21;"""

        self.test_file = os.path.join(self.test_dir, 'integration_test.step')
        with open(self.test_file, 'w') as f:
            f.write(self.sample_step_content)

    def tearDown(self):
        """Clean up test environment and reset the shared state_manager."""
        shutil.rmtree(self.test_dir, ignore_errors=True)
        state_manager.clear_current_file()
        state_manager.clear_session_data()

    def test_complete_workflow_simulation(self):
        """Test complete workflow in simulation mode."""
        # Step 1: File upload and validation
        is_valid, error = validate_step_file(self.test_file)
        self.assertTrue(is_valid, f"File validation failed: {error}")

        # Step 2: Create uploaded file record and register it globally
        uploaded_file = UploadedFile(
            id='integration-test',
            filename='integration_test.step',
            file_path=self.test_file,
            upload_time=datetime.now(),
            status='UPLOADED'
        )

        state_manager.set_current_file(uploaded_file)
        self.assertTrue(state_manager.is_ready_for_processing())

        # Step 3: Start processing (must happen before the ANSYS session)
        state_manager.start_processing("Integration test processing")
        self.assertTrue(state_manager.is_processing())

        # Step 4: Simulate ANSYS session (context manager closes it for us)
        with ANSYSSessionManager(simulation_mode=True) as session:
            self.assertTrue(session.is_session_active)

            # Simulate geometry import
            import_success = session.import_geometry(self.test_file)
            self.assertTrue(import_success, "Geometry import should succeed in simulation")

            # Simulate mesh generation; the session reports a success flag
            # inside the returned dict rather than raising on failure.
            mesh_result = session.generate_mesh()
            self.assertIsInstance(mesh_result, dict)
            self.assertTrue(mesh_result.get('success', False))

        # Step 5: Record a result and complete processing
        final_result = MeshResult(
            element_count=8500,
            node_count=12000,
            quality_score=78.5,
            quality_status='GOOD',
            generation_time=95.0
        )

        # Result must be stored before complete_processing is called.
        state_manager.set_mesh_result(final_result)
        state_manager.complete_processing("Integration test completed")

        # Step 6: Verify final state
        self.assertFalse(state_manager.is_processing())

        processing_status = state_manager.get_processing_status()
        self.assertEqual(processing_status.status, 'COMPLETED')

        # The stored mesh result round-trips through the state manager.
        mesh_result = state_manager.get_mesh_result()
        self.assertIsNotNone(mesh_result)
        self.assertEqual(mesh_result.element_count, 8500)
        self.assertEqual(mesh_result.quality_status, 'GOOD')
|
|
|
|
def run_test_suite():
    """Run the complete test suite.

    Builds a suite from every test class in this module, runs it with a
    verbose text runner, prints a human-readable summary, and reports the
    overall outcome.

    Returns:
        bool: True if there were no failures and no errors.
    """
    print("=" * 60)
    print("CAE Mesh Generator - Test Suite")
    print("=" * 60)

    test_classes = [
        TestFileUploadAndProcessing,
        TestErrorHandling,
        TestResourceManagement,
        TestANSYSIntegration,
        TestMeshQuality,
        TestIntegrationWorkflow,
    ]

    # One loader serves all classes; no need to re-instantiate per iteration.
    loader = unittest.TestLoader()
    test_suite = unittest.TestSuite()
    for test_class in test_classes:
        test_suite.addTests(loader.loadTestsFromTestCase(test_class))

    runner = unittest.TextTestRunner(verbosity=2)
    result = runner.run(test_suite)

    # Print summary
    print("\n" + "=" * 60)
    print("Test Results Summary")
    print("=" * 60)
    print(f"Tests run: {result.testsRun}")
    print(f"Failures: {len(result.failures)}")
    print(f"Errors: {len(result.errors)}")
    # Guard against ZeroDivisionError when no tests were collected.
    if result.testsRun:
        passed = result.testsRun - len(result.failures) - len(result.errors)
        print(f"Success rate: {passed / result.testsRun * 100:.1f}%")
    else:
        print("Success rate: N/A (no tests run)")

    if result.failures:
        print("\nFailures:")
        for test, traceback in result.failures:
            # Show only the first line of the assertion message, not the
            # whole traceback. Split outside the f-string: backslash escapes
            # inside f-string expressions are a SyntaxError before 3.12, and
            # splitlines() handles real newlines correctly.
            message = traceback.split('AssertionError: ')[-1].splitlines()[0]
            print(f"- {test}: {message}")

    if result.errors:
        print("\nErrors:")
        for test, traceback in result.errors:
            # The last non-empty traceback line names the raised exception.
            lines = traceback.strip().splitlines()
            summary = lines[-1] if lines else traceback
            print(f"- {test}: {summary}")

    # Return success status
    return len(result.failures) == 0 and len(result.errors) == 0
|
|
|
|
if __name__ == '__main__':
    # Propagate the suite outcome as the process exit code (0 = all passed).
    sys.exit(0 if run_test_suite() else 1)