"""
|
||
Test Cases for Final Feedback Survey
|
||
|
||
Tests the overall assessment feedback modal functionality:
|
||
- Overall feedback modal
|
||
- Per-question feedback modal
|
||
- Rating selection
|
||
- Comment entry
|
||
- Feedback submission
|
||
"""
|
||
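
# These tests can be selected on their own via their pytest markers, e.g.:
#   pytest -m final_feedback
# or
#   pytest -m "assessment and final_feedback"
# The `assessment` and `final_feedback` markers are assumed to be registered
# (e.g. in pytest.ini or pyproject.toml) so pytest does not emit
# PytestUnknownMarkWarning.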
import pytest
import time
from pages.feedback_survey_page import FeedbackSurveyPage
from pages.domains_page import DomainsPage
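
# NOTE: These tests rely on an `assessment_with_domains` fixture, expected to be
# provided by conftest.py (not shown here). Based on how it is used below, it is
# assumed to return a dict exposing at least:
#   assessment_with_domains['driver']        -> the active Selenium WebDriver
#   assessment_with_domains['domains_page']  -> a DomainsPage bound to that driver
# An illustrative (hypothetical) conftest sketch, for orientation only:
#
#   @pytest.fixture
#   def assessment_with_domains(driver):
#       domains_page = DomainsPage(driver)
#       # ... log in, open an assessment, complete its domains ...
#       return {'driver': driver, 'domains_page': domains_page}

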
@pytest.mark.assessment
@pytest.mark.final_feedback
class TestFinalFeedback:
    """Test cases for Final Feedback Survey"""

    def test_final_feedback_availability(self, assessment_with_domains):
        """Test checking if final feedback is available"""
        driver = assessment_with_domains['driver']
        domains_page = assessment_with_domains['domains_page']

        # Check for final feedback modal on domains page
        is_present = domains_page.is_final_feedback_modal_present()

        if is_present:
            print("✅ Final feedback modal is present on domains page")
        else:
            print("ℹ️ Final feedback modal not present (all domains may not be completed)")

        # Check for feedback survey modal
        feedback_survey = FeedbackSurveyPage(driver)
        if feedback_survey.is_final_feedback_available():
            print("✅ Final feedback survey modal is available")
        else:
            pytest.skip("Final feedback not available - all domains may need to be completed first")

    def test_overall_feedback_modal(self, assessment_with_domains):
        """Test overall feedback modal structure"""
        driver = assessment_with_domains['driver']
        feedback_survey = FeedbackSurveyPage(driver)

        if not feedback_survey.is_overall_modal_present():
            pytest.skip("Overall feedback modal not present")

        # Modal should be visible
        assert feedback_survey.is_element_visible(feedback_survey.OVERALL_MODAL, timeout=5), \
            "Overall feedback modal should be visible"

        print("✅ Overall feedback modal structure verified")

    def test_per_question_feedback_modal(self, assessment_with_domains):
        """Test per-question feedback modal structure"""
        driver = assessment_with_domains['driver']
        feedback_survey = FeedbackSurveyPage(driver)

        if not feedback_survey.is_per_question_modal_present():
            pytest.skip("Per-question feedback modal not present")

        # Modal should be visible
        assert feedback_survey.is_element_visible(feedback_survey.PER_QUESTION_MODAL, timeout=5), \
            "Per-question feedback modal should be visible"

        # Get feedback questions
        question_ids = feedback_survey.get_feedback_questions()

        if question_ids:
            print(f"✅ Per-question feedback modal verified with {len(question_ids)} question(s)")
        else:
            print("✅ Per-question feedback modal verified")

    def test_set_overall_rating(self, assessment_with_domains):
        """Test setting overall rating"""
        driver = assessment_with_domains['driver']
        feedback_survey = FeedbackSurveyPage(driver)

        if not feedback_survey.is_overall_modal_present():
            pytest.skip("Overall feedback modal not present")

        # Set rating to 4
        feedback_survey.set_overall_rating(4)
        time.sleep(0.5)

        print("✅ Set overall rating to: 4")

    def test_set_question_rating(self, assessment_with_domains):
        """Test setting rating for a specific question"""
        driver = assessment_with_domains['driver']
        feedback_survey = FeedbackSurveyPage(driver)

        if not feedback_survey.is_per_question_modal_present():
            pytest.skip("Per-question feedback modal not present")

        # Get question IDs
        question_ids = feedback_survey.get_feedback_questions()
        if not question_ids:
            pytest.skip("No feedback questions found")

        # Set rating for first question
        first_question_id = question_ids[0]
        feedback_survey.set_question_rating(first_question_id, 4)
        time.sleep(0.5)

        print(f"✅ Set rating for question {first_question_id} to: 4")

    def test_enter_question_comment(self, assessment_with_domains):
        """Test entering comment for a question"""
        driver = assessment_with_domains['driver']
        feedback_survey = FeedbackSurveyPage(driver)

        if not feedback_survey.is_per_question_modal_present():
            pytest.skip("Per-question feedback modal not present")

        # Get question IDs
        question_ids = feedback_survey.get_feedback_questions()
        if not question_ids:
            pytest.skip("No feedback questions found")

        # Enter comment for first question
        first_question_id = question_ids[0]
        comment_text = "This is a test comment for automation testing."
        feedback_survey.set_question_comment(first_question_id, comment_text)
        time.sleep(0.5)

        print(f"✅ Entered comment for question {first_question_id}")

    def test_submit_overall_feedback(self, assessment_with_domains):
        """Test submitting overall feedback"""
        driver = assessment_with_domains['driver']
        feedback_survey = FeedbackSurveyPage(driver)

        if not feedback_survey.is_overall_modal_present():
            pytest.skip("Overall feedback modal not present")

        # Set rating
        feedback_survey.set_overall_rating(4)
        time.sleep(0.5)

        # Submit
        feedback_survey.submit_feedback()
        time.sleep(2)

        # Verify modal is closed
        assert not feedback_survey.is_overall_modal_present(), \
            "Overall feedback modal should be closed after submission"

        print("✅ Overall feedback submitted successfully")

    def test_submit_per_question_feedback(self, assessment_with_domains):
        """Test submitting per-question feedback"""
        driver = assessment_with_domains['driver']
        feedback_survey = FeedbackSurveyPage(driver)

        if not feedback_survey.is_per_question_modal_present():
            pytest.skip("Per-question feedback modal not present")

        # Get question IDs
        question_ids = feedback_survey.get_feedback_questions()
        if not question_ids:
            pytest.skip("No feedback questions found")

        # Set rating and comment for each question (limited to the first 3)
        for question_id in question_ids[:3]:
            try:
                feedback_survey.set_question_rating(question_id, 4)
                feedback_survey.set_question_comment(question_id, f"Test comment for question {question_id}")
                time.sleep(0.3)
            except Exception as exc:
                # Don't fail the whole test if a single question can't be rated;
                # log it and move on so the submission path is still exercised.
                print(f"⚠️ Could not set feedback for question {question_id}: {exc}")

        # Submit
        feedback_survey.submit_feedback()
        time.sleep(2)

        # Verify modal is closed
        assert not feedback_survey.is_per_question_modal_present(), \
            "Per-question feedback modal should be closed after submission"

        print(f"✅ Per-question feedback submitted ({len(question_ids[:3])} of {len(question_ids)} question(s) rated)")
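

# Page-object interface assumed by this suite (inferred from the calls above;
# the real definitions live in pages/feedback_survey_page.py and
# pages/domains_page.py, which are not shown here):
#
#   DomainsPage.is_final_feedback_modal_present() -> bool
#   FeedbackSurveyPage.is_final_feedback_available() -> bool
#   FeedbackSurveyPage.is_overall_modal_present() -> bool
#   FeedbackSurveyPage.is_per_question_modal_present() -> bool
#   FeedbackSurveyPage.is_element_visible(locator, timeout) -> bool
#   FeedbackSurveyPage.get_feedback_questions() -> list of question IDs
#   FeedbackSurveyPage.set_overall_rating(rating)
#   FeedbackSurveyPage.set_question_rating(question_id, rating)
#   FeedbackSurveyPage.set_question_comment(question_id, text)
#   FeedbackSurveyPage.submit_feedback()
#   Locators: FeedbackSurveyPage.OVERALL_MODAL, FeedbackSurveyPage.PER_QUESTION_MODAL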