"""
|
||
Test Cases for Domain Feedback Page
|
||
|
||
Tests the per-domain feedback modal functionality:
|
||
- Modal appearance after domain completion
|
||
- Question 1 (Yes/No with reason)
|
||
- Question 2 (Textarea)
|
||
- Submit feedback
|
||
- Skip feedback
|
||
- Navigation after feedback
|
||
"""

import pytest
import time

from pages.domain_feedback_page import DomainFeedbackPage
from pages.domain_assessment_page import DomainAssessmentPage
from pages.domains_page import DomainsPage


@pytest.fixture(scope="function")
def domain_completed_for_feedback(assessment_with_domains):
    """
    Fixture that completes a domain assessment to trigger the feedback modal.
    Relies on the `assessment_with_domains` fixture (expected to be provided
    by conftest.py).

    NOTE: This is a simplified version; a full version would answer every
    question in the domain before expecting the modal.
    """
    domains_page = assessment_with_domains['domains_page']
    driver = assessment_with_domains['driver']
    domain_ids = assessment_with_domains['domain_ids']

    if not domain_ids:
        pytest.skip("No domains available")

    # Find the first unlocked domain
    unlocked_domain_id = None
    for domain_id in domain_ids:
        if domains_page.is_domain_unlocked(domain_id):
            unlocked_domain_id = domain_id
            break

    if not unlocked_domain_id:
        pytest.skip("No unlocked domains available")

    # Start the domain assessment
    domains_page.click_domain_action(unlocked_domain_id)
    time.sleep(2)

    domain_assessment = DomainAssessmentPage(driver)
    domain_assessment.wait_for_page_load()
    domain_assessment.dismiss_guidance()
    time.sleep(1)

    # NOTE: In a real scenario we would answer all questions here. For now
    # we only check whether the feedback modal appears, which assumes the
    # domain has already been completed (manually or by another test).
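    #
    # A minimal sketch of what completing the domain could look like,
    # assuming hypothetical DomainAssessmentPage helpers
    # has_next_question(), answer_current_question() and click_next()
    # (none of these are confirmed by the actual page object):
    #
    #     while domain_assessment.has_next_question():
    #         domain_assessment.answer_current_question()
    #         domain_assessment.click_next()
    #         time.sleep(0.5)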

    return {
        'driver': driver,
        'domain_assessment': domain_assessment,
        'domain_id': unlocked_domain_id,
        'domains_page': domains_page
    }


@pytest.mark.assessment
@pytest.mark.domain_feedback
class TestDomainFeedback:
    """Test cases for Domain Feedback Page"""

    def test_feedback_modal_detection(self, domain_completed_for_feedback):
        """Test that the feedback modal can be detected"""
        driver = domain_completed_for_feedback['driver']
        domain_feedback = DomainFeedbackPage(driver)

        # Check whether the modal is present (it may not be if the domain
        # has not been completed)
        is_present = domain_feedback.is_modal_present()

        if is_present:
            print("✅ Domain feedback modal is present")
        else:
            print("ℹ️ Domain feedback modal not present (domain may not be completed yet)")
            pytest.skip("Domain feedback modal not present - domain may need to be completed first")

    def test_feedback_modal_structure(self, domain_completed_for_feedback):
        """Test that the feedback modal has the expected structure"""
        driver = domain_completed_for_feedback['driver']
        domain_feedback = DomainFeedbackPage(driver)

        if not domain_feedback.is_modal_present():
            pytest.skip("Domain feedback modal not present")

        # Check for question 1 Yes/No options
        has_yes_no = domain_feedback.has_question1_yes_no()

        # Check for question 1 reason textarea
        has_reason = domain_feedback.has_question1_reason()

        # Check for question 2 textarea
        has_question2 = domain_feedback.has_question2_textarea()

        # At least one question should be present
        assert has_yes_no or has_reason or has_question2, \
            "Feedback modal should have at least one question"

        print("✅ Feedback modal structure verified:")
        print(f"   Question 1 (Yes/No): {'✅' if has_yes_no else '❌'}")
        print(f"   Question 1 (Reason): {'✅' if has_reason else '❌'}")
        print(f"   Question 2 (Textarea): {'✅' if has_question2 else '❌'}")

    def test_answer_question1_yes(self, domain_completed_for_feedback):
        """Test answering question 1 with Yes"""
        driver = domain_completed_for_feedback['driver']
        domain_feedback = DomainFeedbackPage(driver)

        if not domain_feedback.is_modal_present():
            pytest.skip("Domain feedback modal not present")

        if not domain_feedback.has_question1_yes_no():
            pytest.skip("Question 1 does not have Yes/No options")

        # Answer Yes
        domain_feedback.select_question1_yes()
        time.sleep(0.5)

        print("✅ Answered question 1 with: Yes")

    def test_enter_question1_reason(self, domain_completed_for_feedback):
        """Test entering a reason for question 1"""
        driver = domain_completed_for_feedback['driver']
        domain_feedback = DomainFeedbackPage(driver)

        if not domain_feedback.is_modal_present():
            pytest.skip("Domain feedback modal not present")

        if not domain_feedback.has_question1_reason():
            pytest.skip("Question 1 does not have a reason textarea")

        # Enter the reason
        reason_text = "This is a test reason for automation testing."
        domain_feedback.enter_question1_reason(reason_text)
        time.sleep(0.5)

        print(f"✅ Entered question 1 reason: {reason_text[:50]}...")

    def test_enter_question2_text(self, domain_completed_for_feedback):
        """Test entering text for question 2"""
        driver = domain_completed_for_feedback['driver']
        domain_feedback = DomainFeedbackPage(driver)

        if not domain_feedback.is_modal_present():
            pytest.skip("Domain feedback modal not present")

        if not domain_feedback.has_question2_textarea():
            pytest.skip("Question 2 textarea not available")

        # Enter the text
        question2_text = "This is a test response for question 2 in domain feedback."
        domain_feedback.enter_question2_text(question2_text)
        time.sleep(0.5)

        print(f"✅ Entered question 2 text: {question2_text[:50]}...")

    def test_submit_feedback(self, domain_completed_for_feedback):
        """Test submitting domain feedback"""
        driver = domain_completed_for_feedback['driver']
        domain_feedback = DomainFeedbackPage(driver)

        if not domain_feedback.is_modal_present():
            pytest.skip("Domain feedback modal not present")

        # Fill in the feedback
        question1_yes = True
        question1_text = "Test reason for automation."
        question2_text = "Test response for question 2."

        # Submit the feedback
        domain_feedback.submit_feedback(
            question1_yes=question1_yes,
            question1_text=question1_text,
            question2_text=question2_text
        )
        time.sleep(2)
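
        # A sturdier alternative to the fixed sleep would be an explicit
        # wait for the modal to disappear. Sketch only: the ".feedback-modal"
        # selector is an assumption, not a selector confirmed by the app.
        #
        #     from selenium.webdriver.common.by import By
        #     from selenium.webdriver.support import expected_conditions as EC
        #     from selenium.webdriver.support.ui import WebDriverWait
        #
        #     WebDriverWait(driver, 10).until(
        #         EC.invisibility_of_element_located(
        #             (By.CSS_SELECTOR, ".feedback-modal")))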

        # Verify the modal is closed and we navigated to the domains page
        assert not domain_feedback.is_modal_present(), \
            "Feedback modal should be closed after submission"
        assert "/domains" in driver.current_url, \
            f"Should navigate to domains page. Current URL: {driver.current_url}"

        print("✅ Domain feedback submitted successfully")
        print(f"   Navigated to: {driver.current_url}")

    def test_skip_feedback(self, domain_completed_for_feedback):
        """Test skipping domain feedback"""
        driver = domain_completed_for_feedback['driver']
        domain_feedback = DomainFeedbackPage(driver)

        # The modal may need to be re-triggered (e.g. by reloading the page);
        # for this test we assume it is still present.
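        #
        # A sketch of re-triggering it via a refresh; whether a refresh
        # actually re-opens the modal depends on the application:
        #
        #     driver.refresh()
        #     time.sleep(2)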

        if not domain_feedback.is_modal_present():
            pytest.skip("Domain feedback modal not present - cannot test skip")

        # Skip the feedback
        domain_feedback.skip_feedback()
        time.sleep(2)

        # Verify the modal is closed and we navigated to the domains page
        assert not domain_feedback.is_modal_present(), \
            "Feedback modal should be closed after skipping"
        assert "/domains" in driver.current_url, \
            f"Should navigate to domains page. Current URL: {driver.current_url}"

        print("✅ Domain feedback skipped successfully")