Commit

Merge pull request #81 from ADORSYS-GIS/52-testing-the-scraping-functionality

feature(backend): unit and integration tests on scraping
Bansikah authored Nov 24, 2023
2 parents 9cf9bad + 2bdec94 commit 2a0da92
Showing 2 changed files with 97 additions and 0 deletions.
43 changes: 43 additions & 0 deletions tests/test_intergrated_test.py
@@ -0,0 +1,43 @@
import unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By

class TestIntegration(unittest.TestCase):
    def setUp(self):
        self.driver = webdriver.Chrome()
        self.driver.get('http://localhost:5000')
        self.wait = WebDriverWait(self.driver, 10)

    def tearDown(self):
        self.driver.quit()

    def test_full_flow(self):
        # Simulate user input for the URL
        user_input_url = input("Enter the website URL: ")

        # Enter the target webpage URL into the form
        url_input = self.driver.find_element(By.NAME, 'url')
        url_input.send_keys(user_input_url)

        depth_input = self.driver.find_element(By.NAME, 'depth')
        depth_input.send_keys('1')

        data_select = self.driver.find_element(By.NAME, 'data_to_look_for')
        # 'python' is one of the options in the drop-down menu of the user interface
        data_select.send_keys('python')

        scrape_button = self.driver.find_element(By.CSS_SELECTOR, '.input-form button')
        scrape_button.click()

        # Wait for the results page to load
        self.wait.until(EC.presence_of_element_located((By.CSS_SELECTOR, '.results ul li')))

        # Verify the scraped data on the results page
        scraped_data = self.driver.find_elements(By.CSS_SELECTOR, '.results ul li')
        self.assertTrue(scraped_data)  # Assert that scraped data is present

if __name__ == '__main__':
    unittest.main()
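
As written, this end-to-end test assumes a Chrome driver is available and the Flask app is already serving the form at http://localhost:5000, and the input() call pauses the run until a URL is typed in. A minimal sketch of a non-interactive variant, assuming a hypothetical SCRAPER_TEST_URL environment variable, could replace the prompt so the test runs unattended:

import os

# Hypothetical: read the target URL from the environment instead of prompting;
# the variable name SCRAPER_TEST_URL is illustrative, not part of this commit.
user_input_url = os.environ.get('SCRAPER_TEST_URL', 'http://example.com')

With that substitution, the form-filling steps above apply unchanged.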
54 changes: 54 additions & 0 deletions tests/test_scraping.py
@@ -0,0 +1,54 @@
import unittest
from unittest.mock import patch, Mock
from scraping import scrape_website
import requests

class TestScraping(unittest.TestCase):

    def input_url(self):
        return input("Enter the website URL: ")

    @patch('builtins.input', side_effect=['http://example.com'])
    @patch('requests.get')
    def test_successful_scrape(self, mock_requests_get, mock_input):
        # Arrange
        mock_response = Mock()
        mock_response.status_code = 200
        mock_response.content = '<html><body><p>Test content</p></body></html>'
        mock_requests_get.return_value = mock_response

        # Act
        result = scrape_website(self.input_url())

        # Assert
        self.assertEqual(result, ['Test content'])

    @patch('builtins.input', side_effect=['http://example.com'])
    @patch('requests.get', side_effect=requests.HTTPError('HTTP Error'))
    def test_http_error(self, mock_requests_get, mock_input):
        # Act
        result = scrape_website(self.input_url())

        # Assert
        self.assertIsNone(result)

    @patch('builtins.input', side_effect=['http://example.com'])
    @patch('requests.get', side_effect=requests.RequestException('Request Exception'))
    def test_request_exception(self, mock_requests_get, mock_input):
        # Act
        result = scrape_website(self.input_url())

        # Assert
        self.assertIsNone(result)

    @patch('builtins.input', side_effect=['http://example.com'])
    @patch('requests.get', side_effect=Exception('Some Other Exception'))
    def test_other_exception(self, mock_requests_get, mock_input):
        # Act
        result = scrape_website(self.input_url())

        # Assert
        self.assertIsNone(result)

if __name__ == '__main__':
    unittest.main()
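
The scraping module itself is not part of this diff, but the tests above pin down its expected contract: a successful request yields the page's paragraph texts as a list (['Test content']), while an HTTPError, a RequestException, or any other exception yields None. A minimal sketch consistent with those tests, assuming a BeautifulSoup-based parse (the real scraping.py in the repository may differ), looks like this:

# Hypothetical sketch of scrape_website, inferred from the unit tests above.
import requests
from bs4 import BeautifulSoup

def scrape_website(url):
    try:
        response = requests.get(url, timeout=10)
        response.raise_for_status()
        soup = BeautifulSoup(response.content, 'html.parser')
        # e.g. ['Test content'] for <p>Test content</p>
        return [p.get_text() for p in soup.find_all('p')]
    except Exception:
        # HTTPError, RequestException, and anything else all map to None,
        # matching the failure-path tests.
        return None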
