Skip to content

Commit

Permalink
refactor(chalice): upgraded dependencies
Browse files Browse the repository at this point in the history
feat(chalice): support heatmaps
  • Loading branch information
tahayk committed Jun 19, 2024
1 parent cae1798 commit 44dc508
Show file tree
Hide file tree
Showing 18 changed files with 400 additions and 141 deletions.
8 changes: 4 additions & 4 deletions api/Pipfile
Original file line number Diff line number Diff line change
Expand Up @@ -9,15 +9,15 @@ requests = "==2.32.3"
boto3 = "==1.34.125"
pyjwt = "==2.8.0"
psycopg2-binary = "==2.9.9"
psycopg = {extras = ["binary", "pool"], version = "==3.1.19"}
elasticsearch = "==8.14.0"
jira = "==3.8.0"
fastapi = "==0.111.0"
python-decouple = "==3.8"
apscheduler = "==4.0.0a5"
redis = "==5.1.0b6"
psycopg = {extras = ["binary", "pool"], version = "==3.1.19"}
uvicorn = {extras = ["standard"], version = "==0.30.1"}
python-decouple = "==3.8"
pydantic = {extras = ["email"], version = "==2.3.0"}
apscheduler = "==3.10.4"
redis = "==5.1.0b6"

[dev-packages]

Expand Down
1 change: 0 additions & 1 deletion api/chalicelib/core/__init__.py
Original file line number Diff line number Diff line change
@@ -1 +0,0 @@
from . import sessions as sessions_legacy
80 changes: 0 additions & 80 deletions api/chalicelib/core/click_maps.py

This file was deleted.

5 changes: 3 additions & 2 deletions api/chalicelib/core/custom_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from fastapi import HTTPException, status

import schemas
from chalicelib.core import sessions, funnels, errors, issues, click_maps, sessions_mobs, product_analytics, \
from chalicelib.core import sessions, funnels, errors, issues, heatmaps, sessions_mobs, product_analytics, \
custom_metrics_predefined
from chalicelib.utils import helper, pg_client
from chalicelib.utils.TimeUTC import TimeUTC
Expand Down Expand Up @@ -90,7 +90,7 @@ def __get_click_map_chart(project_id, user_id, data: schemas.CardClickMap, inclu
return None
data.series[0].filter.filters += data.series[0].filter.events
data.series[0].filter.events = []
return click_maps.search_short_session(project_id=project_id, user_id=user_id,
return heatmaps.search_short_session(project_id=project_id, user_id=user_id,
data=schemas.ClickMapSessionsSearch(
**data.series[0].filter.model_dump()),
include_mobs=include_mobs)
Expand Down Expand Up @@ -178,6 +178,7 @@ def get_chart(project_id: int, data: schemas.CardSchema, user_id: int):
schemas.MetricType.timeseries: __get_timeseries_chart,
schemas.MetricType.table: __get_table_chart,
schemas.MetricType.click_map: __get_click_map_chart,
schemas.MetricType.heat_map: __get_click_map_chart,
schemas.MetricType.funnel: __get_funnel_chart,
schemas.MetricType.insights: not_supported,
schemas.MetricType.pathAnalysis: __get_path_analysis_chart
Expand Down
148 changes: 116 additions & 32 deletions api/chalicelib/core/heatmaps.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
import logging

import schemas
from chalicelib.utils import helper, pg_client
from chalicelib.utils import sql_helper as sh
from chalicelib.core import sessions_mobs, sessions, events
from chalicelib.utils import pg_client, helper

# from chalicelib.utils import sql_helper as sh

logger = logging.getLogger(__name__)

Expand All @@ -19,42 +21,41 @@ def get_by_url(project_id, data: schemas.GetHeatmapPayloadSchema):
"duration IS NOT NULL",
"normalized_x IS NOT NULL"]
query_from = "events.clicks INNER JOIN sessions USING (session_id)"
q_count = "count(1) AS count"
has_click_rage_filter = False
if len(data.filters) > 0:
for i, f in enumerate(data.filters):
if f.type == schemas.FilterType.issue and len(f.value) > 0:
has_click_rage_filter = True
q_count = "max(real_count) AS count,TRUE AS click_rage"
query_from += """INNER JOIN events_common.issues USING (timestamp, session_id)
INNER JOIN issues AS mis USING (issue_id)
INNER JOIN LATERAL (
SELECT COUNT(1) AS real_count
FROM events.clicks AS sc
INNER JOIN sessions as ss USING (session_id)
WHERE ss.project_id = 2
AND (sc.url = %(url)s OR sc.path = %(url)s)
AND sc.timestamp >= %(startDate)s
AND sc.timestamp <= %(endDate)s
AND ss.start_ts >= %(startDate)s
AND ss.start_ts <= %(endDate)s
AND sc.selector = clicks.selector) AS r_clicks ON (TRUE)"""
constraints += ["mis.project_id = %(project_id)s",
"issues.timestamp >= %(startDate)s",
"issues.timestamp <= %(endDate)s"]
f_k = f"issue_value{i}"
args = {**args, **sh.multi_values(f.value, value_key=f_k)}
constraints.append(sh.multi_conditions(f"%({f_k})s = ANY (issue_types)",
f.value, value_key=f_k))
constraints.append(sh.multi_conditions(f"mis.type = %({f_k})s",
f.value, value_key=f_k))
    # TODO: confirm whether this issue-filter branch is still needed; if it is not,
    # delete the commented-out block below instead of keeping it around.
# if len(data.filters) > 0:
# for i, f in enumerate(data.filters):
# if f.type == schemas.FilterType.issue and len(f.value) > 0:
# has_click_rage_filter = True
# query_from += """INNER JOIN events_common.issues USING (timestamp, session_id)
# INNER JOIN issues AS mis USING (issue_id)
# INNER JOIN LATERAL (
# SELECT COUNT(1) AS real_count
# FROM events.clicks AS sc
# INNER JOIN sessions as ss USING (session_id)
# WHERE ss.project_id = 2
# AND (sc.url = %(url)s OR sc.path = %(url)s)
# AND sc.timestamp >= %(startDate)s
# AND sc.timestamp <= %(endDate)s
# AND ss.start_ts >= %(startDate)s
# AND ss.start_ts <= %(endDate)s
# AND sc.selector = clicks.selector) AS r_clicks ON (TRUE)"""
# constraints += ["mis.project_id = %(project_id)s",
# "issues.timestamp >= %(startDate)s",
# "issues.timestamp <= %(endDate)s"]
# f_k = f"issue_value{i}"
# args = {**args, **sh.multi_values(f.value, value_key=f_k)}
# constraints.append(sh.multi_conditions(f"%({f_k})s = ANY (issue_types)",
# f.value, value_key=f_k))
# constraints.append(sh.multi_conditions(f"mis.type = %({f_k})s",
# f.value, value_key=f_k))

if data.click_rage and not has_click_rage_filter:
constraints.append("""(issues.session_id IS NULL
OR (issues.timestamp >= %(startDate)s
AND issues.timestamp <= %(endDate)s
AND mis.project_id = %(project_id)s))""")
q_count += ",COALESCE(bool_or(mis.type = 'click_rage'), FALSE) AS click_rage"
AND mis.project_id = %(project_id)s
AND mis.type='click_rage'))""")
query_from += """LEFT JOIN events_common.issues USING (timestamp, session_id)
LEFT JOIN issues AS mis USING (issue_id)"""
with pg_client.PostgresClient() as cur:
Expand All @@ -77,3 +78,86 @@ def get_by_url(project_id, data: schemas.GetHeatmapPayloadSchema):
rows = cur.fetchall()

return helper.list_to_camel_case(rows)


# SQL column list used to project a single session row for heatmap/click-map
# responses. Interpolated into the SELECT built by search_short_session.
# NOTE: the fragment expects a %(userId)s query parameter (for the "viewed"
# sub-select), so it can only be used with args that include "userId".
# session_id is cast to ::text — presumably to avoid 64-bit integer precision
# loss on the JS client; confirm before changing.
SESSION_PROJECTION_COLS = """s.project_id,
s.session_id::text AS session_id,
s.user_uuid,
s.user_id,
s.user_os,
s.user_browser,
s.user_device,
s.user_device_type,
s.user_country,
s.start_ts,
s.duration,
s.events_count,
s.pages_count,
s.errors_count,
s.user_anonymous_id,
s.platform,
s.issue_score,
to_jsonb(s.issue_types) AS issue_types,
favorite_sessions.session_id NOTNULL AS favorite,
COALESCE((SELECT TRUE
FROM public.user_viewed_sessions AS fs
WHERE s.session_id = fs.session_id
AND fs.user_id = %(userId)s LIMIT 1), FALSE) AS viewed """


def search_short_session(data: schemas.ClickMapSessionsSearch, project_id, user_id,
                         include_mobs: bool = True, exclude_sessions=None,
                         _depth: int = 3):
    """Find the best session to render a click-map/heatmap for.

    Picks the longest matching session (ORDER BY duration DESC LIMIT 1) and,
    when include_mobs is set, verifies a replay file exists; if not, it retries
    recursively up to `_depth` times, excluding the sessions already tried.

    :param data: click-map session search criteria; sort/order are forcibly
                 overwritten to duration DESC.
    :param project_id: project to search in.
    :param user_id: requesting user (used for favorite/viewed projection).
    :param include_mobs: also resolve replay-file URLs and validate they exist.
    :param exclude_sessions: session_ids (as strings) to skip; defaults to
                             an empty list.
    :param _depth: remaining retry attempts when no replay file is found.
    :return: the session as a camelCase dict, or None if nothing matched.
    """
    # Default is None (not []) to avoid the shared-mutable-default pitfall.
    if exclude_sessions is None:
        exclude_sessions = []
    # Click maps are desktop-only: force a platform=desktop filter unless the
    # caller already filters on platform.
    if not any(f.type == schemas.FilterType.platform for f in data.filters):
        data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.platform,
                                                              value=[schemas.PlatformType.desktop],
                                                              operator=schemas.SearchEventOperator._is))

    full_args, query_part = sessions.search_query_parts(data=data, error_status=None, errors_only=False,
                                                        favorite_only=data.bookmarked, issue=None,
                                                        project_id=project_id, user_id=user_id)
    # Use a list (adapted to a PG array by both psycopg2 and psycopg3) with
    # != ALL(...) instead of "NOT IN (%(...)s)": a tuple placeholder wrapped in
    # extra parentheses renders as "NOT IN ((...))", a row-constructor
    # comparison that PostgreSQL rejects.
    full_args["exclude_sessions"] = list(exclude_sessions)
    if len(exclude_sessions) > 0:
        query_part += "\n AND session_id::text != ALL(%(exclude_sessions)s)"
    with pg_client.PostgresClient() as cur:
        # Deterministic choice: always take the longest session available.
        data.order = schemas.SortOrderType.desc
        data.sort = 'duration'
        main_query = cur.mogrify(f"""SELECT {SESSION_PROJECTION_COLS}
                                    {query_part}
                                    ORDER BY {data.sort} {data.order.value}
                                    LIMIT 1;""", full_args)
        logger.debug("--------------------")
        logger.debug(main_query)
        logger.debug("--------------------")
        try:
            cur.execute(main_query)
        except Exception as err:
            logger.warning("--------- CLICK MAP SHORT SESSION SEARCH QUERY EXCEPTION -----------")
            logger.warning(main_query.decode('UTF-8'))
            logger.warning("--------- PAYLOAD -----------")
            logger.warning(data.model_dump_json())
            logger.warning("--------------------")
            raise err

        session = cur.fetchone()
        if session:
            if include_mobs:
                session['domURL'] = sessions_mobs.get_urls(session_id=session["session_id"], project_id=project_id)
                session['mobsUrl'] = sessions_mobs.get_urls_depercated(session_id=session["session_id"])
                if _depth > 0 and len(session['domURL']) == 0 and len(session['mobsUrl']) == 0:
                    # No replay file for this session: retry, skipping it.
                    return search_short_session(data=data, project_id=project_id, user_id=user_id,
                                                include_mobs=include_mobs,
                                                exclude_sessions=exclude_sessions + [session["session_id"]],
                                                _depth=_depth - 1)
                elif _depth == 0 and len(session['domURL']) == 0 and len(session['mobsUrl']) == 0:
                    logger.info("couldn't find an existing replay after 3 iterations for heatmap")

            session['events'] = events.get_by_session_id(project_id=project_id, session_id=session["session_id"],
                                                         event_type=schemas.EventType.location)

    return helper.dict_to_camel_case(session)
2 changes: 1 addition & 1 deletion api/requirements-alerts.txt
Original file line number Diff line number Diff line change
Expand Up @@ -14,4 +14,4 @@ fastapi==0.111.0
uvicorn[standard]==0.30.1
python-decouple==3.8
pydantic[email]==2.3.0
apscheduler==4.0.0a5
apscheduler==3.10.4
2 changes: 1 addition & 1 deletion api/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,6 @@ fastapi==0.111.0
uvicorn[standard]==0.30.1
python-decouple==3.8
pydantic[email]==2.3.0
apscheduler==4.0.0a5
apscheduler==3.10.4

redis==5.1.0b6
2 changes: 1 addition & 1 deletion api/routers/core_dynamic.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@

import schemas
from chalicelib.core import sessions, errors, errors_viewed, errors_favorite, sessions_assignments, heatmaps, \
sessions_favorite, assist, sessions_notes, click_maps, sessions_replay, signup, feature_flags
sessions_favorite, assist, sessions_notes, sessions_replay, signup, feature_flags
from chalicelib.core import sessions_viewed
from chalicelib.core import tenants, users, projects, license
from chalicelib.core import webhook
Expand Down
2 changes: 2 additions & 0 deletions api/schemas/schemas.py
Original file line number Diff line number Diff line change
Expand Up @@ -934,6 +934,8 @@ class MetricType(str, Enum):
retention = "retention"
stickiness = "stickiness"
click_map = "clickMap"
    # heat_map is an alias of click_map: both card types are handled by the
    # same chart builder (see custom_metrics.get_chart dispatch table).
heat_map = "heatMap"
insights = "insights"


Expand Down
2 changes: 0 additions & 2 deletions ee/api/.gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -190,7 +190,6 @@ Pipfile.lock
/chalicelib/core/authorizers.py
/chalicelib/core/autocomplete.py
/chalicelib/core/canvas.py
/chalicelib/core/click_maps.py
/chalicelib/core/collaboration_base.py
/chalicelib/core/collaboration_msteams.py
/chalicelib/core/collaboration_slack.py
Expand All @@ -201,7 +200,6 @@ Pipfile.lock
/chalicelib/core/events_mobile.py
/chalicelib/core/feature_flags.py
/chalicelib/core/funnels.py
/chalicelib/core/heatmaps.py
/chalicelib/core/integration_base.py
/chalicelib/core/integration_base_issue.py
/chalicelib/core/integration_github.py
Expand Down
10 changes: 5 additions & 5 deletions ee/api/Pipfile
Original file line number Diff line number Diff line change
Expand Up @@ -9,19 +9,19 @@ requests = "==2.32.3"
boto3 = "==1.34.125"
pyjwt = "==2.8.0"
psycopg2-binary = "==2.9.9"
psycopg = {extras = ["binary", "pool"], version = "==3.1.19"}
elasticsearch = "==8.14.0"
jira = "==3.8.0"
fastapi = "==0.111.0"
uvicorn = {extras = ["standard"], version = "==0.30.1"}
gunicorn = "==22.0.0"
python-decouple = "==3.8"
apscheduler = "==4.0.0a5"
pydantic = {extras = ["email"], version = "==2.3.0"}
apscheduler = "==3.10.4"
clickhouse-driver = {extras = ["lz4"], version = "==0.2.8"}
python3-saml = "==1.16.0"
redis = "==5.1.0b6"
azure-storage-blob = "==12.21.0b1"
psycopg = {extras = ["binary", "pool"], version = "==3.1.19"}
uvicorn = {extras = ["standard"], version = "==0.30.1"}
pydantic = {extras = ["email"], version = "==2.3.0"}
clickhouse-driver = {extras = ["lz4"], version = "==0.2.8"}

[dev-packages]

Expand Down
Loading

0 comments on commit 44dc508

Please sign in to comment.