---
# WDS Test Scenario Template
# Save to: test-scenarios/TS-XXX-name.yaml
#
# NOTE(review): this file had been flattened onto a few physical lines, which
# made the whole document unparseable (the leading "#" commented out line 1).
# Structure below is reconstructed from the preserved "- " item markers;
# nesting of top-level sections is inferred — confirm against the consumer.

test_scenario:
  id: "TS-XXX"                       # Format: TS-001, TS-002, etc.
  name: "Feature Testing"            # Human-readable name
  delivery_id: "DD-XXX"              # Related Design Delivery
  type: "user_acceptance"            # user_acceptance | integration | e2e
  status: "ready"                    # ready | in_progress | blocked
  tester: "designer"                 # designer | qa | developer
  created_at: "YYYY-MM-DDTHH:MM:SSZ"

test_objectives:
  - "Validate implementation matches design specifications"
  - "Verify user flow is intuitive and smooth"
  - "Confirm all edge cases are handled"
  - "Ensure design system components are used correctly"
  - "Test accessibility and usability"

test_environment:
  devices:
    - "Device 1 (OS version)"
    - "Device 2 (OS version)"
  test_data:
    - field: "value"
    - field: "value"

# Happy Path Tests
happy_path:
  - id: "HP-001"
    name: "Main User Flow"
    priority: "critical"             # critical | high | medium | low
    # Each step is one mapping: action + expected + design reference.
    steps:
      - action: "[User action]"
        expected: "[Expected result]"
        design_ref: "[Path to specification]#[section]"
      - action: "[User action]"
        expected: "[Expected result]"
        design_ref: "[Path to specification]#[section]"
    success_criteria:
      - "[Success criterion 1]"
      - "[Success criterion 2]"
      - "[Success criterion 3]"

# Error State Tests
error_states:
  - id: "ES-001"
    name: "Error Scenario"
    priority: "high"
    # NOTE(review): unlike happy_path, these steps were authored as separate
    # single-key list items (each had its own "- " marker) — kept as-is.
    steps:
      - action: "[Action that triggers error]"
      - expected: "[Expected error message]"
      - expected: "[Expected recovery option]"
      - design_ref: "[Path to specification]#[error-section]"
    success_criteria:
      - "[Error handling criterion 1]"
      - "[Error handling criterion 2]"

# Edge Case Tests
edge_cases:
  - id: "EC-001"
    name: "Edge Case Scenario"
    priority: "medium"
    steps:
      - action: "[Unusual action]"
      - expected: "[Expected handling]"
      - design_ref: "[Path to specification]#[edge-case-section]"
    success_criteria:
      - "[Edge case criterion 1]"

# Design System Validation
design_system_checks:
  - id: "DS-001"
    name: "Component Validation"
    checks:
      - component: "Component Name"
        instances: ["Location 1", "Location 2"]
        verify:
          - "[Visual property 1]"
          - "[Visual property 2]"
          - "[State behavior 1]"
        design_ref: "D-Design-System/path/to/component.md"

# Accessibility Tests
accessibility:
  - id: "A11Y-001"
    name: "Screen Reader Navigation"
    priority: "high"
    setup: "Enable screen reader (VoiceOver/TalkBack)"
    steps:
      - action: "[Navigate with screen reader]"
      - verify:
          - "[Accessibility check 1]"
          - "[Accessibility check 2]"
    success_criteria:
      - "[Accessibility criterion 1]"
      - "[Accessibility criterion 2]"

# Usability Tests
usability:
  - id: "UX-001"
    name: "First Impression"
    type: "observational"
    # Literal block scalar: newlines in the instructions are preserved.
    instructions: |
      [Instructions for conducting usability test]
    success_criteria:
      - "[Usability criterion 1]"
      - "[Usability criterion 2]"

# Performance Tests
performance:
  - id: "PERF-001"
    name: "Performance Check"
    verify:
      - "[Performance metric 1]"
      - "[Performance metric 2]"
    success_criteria:
      - "[Performance target 1]"
      - "[Performance target 2]"

# Test Report Template
report_template:
  sections:
    - name: "Test Summary"
      fields:
        - "Date tested"
        - "Tester name"
        - "Device tested"
        - "Build version"
        - "Overall result (Pass/Fail/Partial)"
    - name: "Happy Path Results"
      fields:
        - "Test ID"
        - "Result (Pass/Fail)"
        - "Notes"
        - "Screenshots"
    - name: "Issues Found"
      fields:
        - "Issue ID"
        - "Severity (Critical/High/Medium/Low)"
        - "Description"
        - "Steps to reproduce"
        - "Expected vs Actual"
        - "Screenshot/Video"
        - "Design reference violated"
    - name: "Design System Compliance"
      fields:
        - "Component"
        - "Compliant (Yes/No)"
        - "Deviations noted"
    - name: "Recommendations"
      fields:
        - "What worked well"
        - "What needs improvement"
        - "Suggested changes"

# Sign-off Criteria
sign_off:
  required_for_approval:
    - "All critical tests pass"
    - "No critical or high severity issues"
    - "Design system compliance > 95%"
    - "Accessibility tests pass"
    - "Usability metrics meet targets"
  designer_approval:
    statement: |
      I confirm that the implemented feature matches the design
      specifications and meets the quality standards defined
      in this test scenario.
    signature: "________________"
    date: "________________"