-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #81 from ADORSYS-GIS/52-testing-the-scraping-functionality
feature(backend): unit and integration tests for the scraping functionality
- Loading branch information
Showing
2 changed files
with
97 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,43 @@ | ||
import unittest | ||
from selenium import webdriver | ||
from selenium.webdriver.common.keys import Keys | ||
from selenium.webdriver.support.ui import WebDriverWait | ||
from selenium.webdriver.support import expected_conditions as EC | ||
from selenium.webdriver.common.by import By | ||
|
||
class TestIntegration(unittest.TestCase):
    """End-to-end test of the scraping web UI driven through a real Chrome browser.

    Requires the application to be running at ``http://localhost:5000`` and a
    Chrome/chromedriver installation. Intended for integration runs, not unit
    testing: it performs live browser and network I/O.
    """

    BASE_URL = 'http://localhost:5000'
    # Fixed target URL for the form. The original prompted via input(), which
    # blocks any unattended/CI run; a constant keeps the test automatable.
    TARGET_URL = 'http://example.com'

    def setUp(self):
        """Start a browser, open the app, and prepare an explicit wait."""
        self.driver = webdriver.Chrome()
        self.driver.get(self.BASE_URL)
        self.wait = WebDriverWait(self.driver, 10)

    def tearDown(self):
        """Always shut the browser down, even if the test failed."""
        self.driver.quit()

    def test_full_flow(self):
        """Fill in the scrape form, submit it, and assert results render."""
        # Selenium 4 removed the find_element_by_* helpers; the supported
        # API is find_element(By.<strategy>, value).
        url_input = self.driver.find_element(By.NAME, 'url')
        url_input.send_keys(self.TARGET_URL)

        depth_input = self.driver.find_element(By.NAME, 'depth')
        depth_input.send_keys('1')

        # 'python' is one of the options in the data-type drop-down
        # presented by the user interface.
        data_select = self.driver.find_element(By.NAME, 'data_to_look_for')
        data_select.send_keys('python')

        scrape_button = self.driver.find_element(
            By.CSS_SELECTOR, '.input-form button')
        scrape_button.click()

        # Block (up to the 10 s wait) until at least one result list item
        # appears on the results page.
        self.wait.until(EC.presence_of_element_located(
            (By.CSS_SELECTOR, '.results ul li')))

        # Verify the scraped data is present on the results page.
        scraped_data = self.driver.find_elements(
            By.CSS_SELECTOR, '.results ul li')
        self.assertTrue(scraped_data)
|
||
# Allow running this integration suite directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,54 @@ | ||
import unittest | ||
from unittest.mock import patch, Mock | ||
from scraping import scrape_website | ||
import requests | ||
|
||
class TestScraping(unittest.TestCase):
    """Unit tests for ``scrape_website`` with ``requests.get`` mocked out.

    The original tests patched ``builtins.input`` in every case solely to
    produce a fixed URL string; the constant is now used directly, which
    removes the stdin plumbing and one mock parameter per test while leaving
    the behavior under test unchanged.
    """

    # Fixed target URL used by every test case.
    URL = 'http://example.com'

    @patch('requests.get')
    def test_successful_scrape(self, mock_requests_get):
        """A 200 response yields the page's scraped text content."""
        mock_response = Mock()
        mock_response.status_code = 200
        # NOTE(review): a real requests Response.content is bytes; the str
        # here mirrors the original test -- confirm it matches what
        # scrape_website actually expects.
        mock_response.content = '<html><body><p>Test content</p></body></html>'
        mock_requests_get.return_value = mock_response

        result = scrape_website(self.URL)

        self.assertEqual(result, ['Test content'])

    @patch('requests.get', side_effect=requests.HTTPError('HTTP Error'))
    def test_http_error(self, mock_requests_get):
        """An HTTPError raised by the request results in None."""
        self.assertIsNone(scrape_website(self.URL))

    @patch('requests.get',
           side_effect=requests.RequestException('Request Exception'))
    def test_request_exception(self, mock_requests_get):
        """Any RequestException (connection, timeout, ...) results in None."""
        self.assertIsNone(scrape_website(self.URL))

    @patch('requests.get', side_effect=Exception('Some Other Exception'))
    def test_other_exception(self, mock_requests_get):
        """Unexpected exception types are also handled, returning None."""
        self.assertIsNone(scrape_website(self.URL))
|
||
# Script entry point: run this unit-test module standalone.
if __name__ == '__main__':
    unittest.main()