Google Search: results per page #79

Merged: 3 commits, Nov 22, 2024. Changes from 1 commit.
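This PR adds a results_per_page option to the Google Search spider, exposed as Google's num URL parameter. As a rough usage sketch (standard Scrapy entry points, not part of this diff; assumes Zyte API access is already configured in settings):

from scrapy.crawler import CrawlerProcess

from zyte_spider_templates.spiders.serp import GoogleSearchSpider

process = CrawlerProcess()
# results_per_page maps to Google's "num" URL parameter; it must be >= 1,
# and the Google default (10) is deliberately left out of request URLs.
process.crawl(GoogleSearchSpider, search_queries="foo", results_per_page=20)
process.start()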
114 changes: 77 additions & 37 deletions tests/test_serp.py
@@ -1,3 +1,5 @@
from urllib.parse import quote_plus

import pytest
from pydantic import ValidationError
from scrapy import Request
@@ -11,6 +13,44 @@
from .utils import assertEqualSpiderMetadata


def run_parse_serp(spider, total_results=99999, page=1, query="foo"):
    url = f"https://www.google.com/search?q={quote_plus(query)}"
    if page > 1:
        url = add_or_replace_parameter(url, "start", (page - 1) * 10)
    response = ZyteAPITextResponse.from_api_response(
        api_response={
            "serp": {
                "organicResults": [
                    {
                        "description": "…",
                        "name": "…",
                        "url": f"https://example.com/{rank}",
                        "rank": rank,
                    }
                    for rank in range(1, 11)
                ],
                "metadata": {
                    "dateDownloaded": "2024-10-25T08:59:45Z",
                    "displayedQuery": query,
                    "searchedQuery": query,
                    "totalOrganicResults": total_results,
                },
                "pageNumber": page,
                "url": url,
            },
            "url": url,
        },
    )
    items = []
    requests = []
    for item_or_request in spider.parse_serp(response, page_number=page):
        if isinstance(item_or_request, Request):
            requests.append(item_or_request)
        else:
            items.append(item_or_request)
    return items, requests


def test_parameters():
    with pytest.raises(ValidationError):
        GoogleSearchSpider()
@@ -259,6 +299,13 @@ def test_metadata():
"title": "Max Pages",
"type": "integer",
},
"results_per_page": {
"default": 10,
"description": "Maximum number of results per page.",
"minimum": 1,
"title": "Results Per Page",
"type": "integer",
},
"max_requests": {
"anyOf": [{"type": "integer"}, {"type": "null"}],
"default": 100,
@@ -321,67 +368,37 @@ def test_pagination():
    crawler = get_crawler()
    spider = GoogleSearchSpider.from_crawler(crawler, search_queries="foo bar")

    def run_parse_serp(total_results, page=1):
        url = "https://www.google.com/search?q=foo+bar"
        if page > 1:
            url = add_or_replace_parameter(url, "start", (page - 1) * 10)
        response = ZyteAPITextResponse.from_api_response(
            api_response={
                "serp": {
                    "organicResults": [
                        {
                            "description": "…",
                            "name": "…",
                            "url": f"https://example.com/{rank}",
                            "rank": rank,
                        }
                        for rank in range(1, 11)
                    ],
                    "metadata": {
                        "dateDownloaded": "2024-10-25T08:59:45Z",
                        "displayedQuery": "foo bar",
                        "searchedQuery": "foo bar",
                        "totalOrganicResults": total_results,
                    },
                    "pageNumber": page,
                    "url": url,
                },
                "url": url,
            },
        )
        items = []
        requests = []
        for item_or_request in spider.parse_serp(response, page_number=page):
            if isinstance(item_or_request, Request):
                requests.append(item_or_request)
            else:
                items.append(item_or_request)
        return items, requests

    items, requests = run_parse_serp(
        spider,
        total_results=10,
    )
    assert len(items) == 1
    assert len(requests) == 0

    items, requests = run_parse_serp(
        spider,
        total_results=11,
        query="foo bar",
    )
    assert len(items) == 1
    assert len(requests) == 1
    assert requests[0].url == "https://www.google.com/search?q=foo+bar&start=10"
    assert requests[0].cb_kwargs["page_number"] == 2

    items, requests = run_parse_serp(
        spider,
        total_results=20,
        page=2,
        query="foo bar",
    )
    assert len(items) == 1
    assert len(requests) == 0

    items, requests = run_parse_serp(
        spider,
        total_results=21,
        page=2,
        query="foo bar",
    )
    assert len(items) == 1
    assert len(requests) == 1
@@ -445,3 +462,26 @@ def test_parse_serp():
    # The page_number parameter is required.
    with pytest.raises(TypeError):
        spider.parse_serp(response)


def test_results_per_page():
    crawler = get_crawler()
    spider = GoogleSearchSpider.from_crawler(
        crawler, search_queries="foo", results_per_page=1
    )
    requests = list(spider.start_requests())
    assert len(requests) == 1
    assert requests[0].url == "https://www.google.com/search?q=foo&num=1"

    items, requests = run_parse_serp(spider)
    assert len(items) == 1
    assert len(requests) == 1
    assert requests[0].url == "https://www.google.com/search?q=foo&start=1&num=1"


def test_results_per_page_min():
    crawler = get_crawler()
    with pytest.raises(ValidationError):
        GoogleSearchSpider.from_crawler(
            crawler, search_queries="foo", results_per_page=0
        )
20 changes: 18 additions & 2 deletions zyte_spider_templates/spiders/serp.py
@@ -48,6 +48,17 @@ class SerpMaxPagesParam(BaseModel):
    )


class SerpResultsPerPageParam(BaseModel):
    results_per_page: int = Field(
        title="Results Per Page",
        description="Maximum number of results per page.",
        ge=1,
        # NOTE: This should match the actual Google Search default, because
        # when the default value is passed, it is not set in URLs.
        default=10,
    )
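The NOTE above default=10 drives the URL logic in get_serp_request further down: num is only added when the configured value differs from Google's own default. A minimal sketch of the w3lib helper the spider uses for this (example URLs only):

from w3lib.url import add_or_replace_parameter

url = "https://www.google.com/search?q=foo"
# A non-default results_per_page, e.g. 20, is appended as "num":
print(add_or_replace_parameter(url, "num", "20"))
# https://www.google.com/search?q=foo&num=20
# With the default of 10, the spider skips this call and leaves the URL
# untouched, since Google already returns 10 organic results per page.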


class GoogleDomainParam(BaseModel):
    domain: GoogleDomain = Field(
        title="Domain",
@@ -58,6 +69,7 @@ class GoogleDomainParam(BaseModel):

class GoogleSearchSpiderParams(
    MaxRequestsParam,
    SerpResultsPerPageParam,
    SerpMaxPagesParam,
    SearchQueriesParam,
    GoogleDomainParam,
@@ -76,7 +88,6 @@ class GoogleSearchSpider(Args[GoogleSearchSpiderParams], BaseSpider):
"""

name = "google_search"
_results_per_page = 10

metadata: Dict[str, Any] = {
**BaseSpider.metadata,
@@ -99,6 +110,11 @@ def update_settings(cls, settings: BaseSettings) -> None:
        )

    def get_serp_request(self, url: str, *, page_number: int):
        if (
            self.args.results_per_page
            != self.args.model_fields["results_per_page"].default
        ):
            url = add_or_replace_parameter(url, "num", str(self.args.results_per_page))
        return Request(
            url=url,
            callback=self.parse_serp,
@@ -126,7 +142,7 @@ def start_requests(self) -> Iterable[Request]:
    def parse_serp(self, response, page_number) -> Iterable[Union[Request, Serp]]:
        serp = Serp.from_dict(response.raw_api_response["serp"])

        next_start = page_number * self._results_per_page
        next_start = page_number * self.args.results_per_page
        if serp.organicResults and serp.metadata.totalOrganicResults > next_start:
            next_url = add_or_replace_parameter(serp.url, "start", str(next_start))
            yield self.get_serp_request(next_url, page_number=page_number + 1)
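To make the pagination arithmetic concrete, a worked example using the values from test_results_per_page and test_pagination above:

# With results_per_page=1 and page_number=1:
#   next_start = 1 * 1 = 1
# totalOrganicResults (99999) > 1, so a page-2 request is yielded for
# https://www.google.com/search?q=foo&start=1&num=1
# With the default results_per_page=10, page_number=1 gives
#   next_start = 1 * 10 = 10
# so the follow-up URL carries start=10 and no num parameter.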