From c0ff2265c72eed62acece65be1fc1a8a144e707f Mon Sep 17 00:00:00 2001
From: Taha Yassine Kraiem
Date: Wed, 17 Jul 2024 15:59:13 +0200
Subject: [PATCH] refactor(chalice): refactored schemas

---
 api/chalicelib/core/alerts.py                  |   2 +-
 api/chalicelib/core/alerts_processor.py        |  46 +-
 api/chalicelib/core/assist.py                  |  28 +-
 api/chalicelib/core/autocomplete.py            |  30 +-
 api/chalicelib/core/collaboration_msteams.py   |   8 +-
 api/chalicelib/core/collaboration_slack.py     |   8 +-
 api/chalicelib/core/custom_metrics.py          |  66 +-
 .../core/custom_metrics_predefined.py          |  90 +-
 api/chalicelib/core/errors.py                  |  18 +-
 api/chalicelib/core/events.py                  |  42 +-
 api/chalicelib/core/feature_flags.py           |   6 +-
 api/chalicelib/core/funnels.py                 |  14 +-
 api/chalicelib/core/heatmaps.py                |  22 +-
 api/chalicelib/core/integration_github.py      |   2 +-
 api/chalicelib/core/integration_jira_cloud.py  |   2 +-
 api/chalicelib/core/integrations_global.py     |  26 +-
 api/chalicelib/core/metrics.py                 |  36 +-
 api/chalicelib/core/performance_event.py       |  12 +-
 api/chalicelib/core/product_analytics.py       |  54 +-
 api/chalicelib/core/sessions.py                | 160 ++--
 api/chalicelib/core/sessions_metas.py          | 114 +--
 api/chalicelib/core/significance.py            |  24 +-
 api/chalicelib/core/webhook.py                 |   2 +-
 api/chalicelib/utils/helper.py                 |  18 +-
 api/chalicelib/utils/sql_helper.py             |  30 +-
 api/routers/core.py                            |  32 +-
 api/schemas/schemas.py                         | 784 +++++++++---------
 api/test/test_feature_flag.py                  |   2 +-
 ee/api/chalicelib/core/alerts_processor.py     |  46 +-
 .../chalicelib/core/alerts_processor_exp.py    |  44 +-
 ee/api/chalicelib/core/assist_records.py       |   2 +-
 ee/api/chalicelib/core/autocomplete_exp.py     |  30 +-
 ee/api/chalicelib/core/custom_metrics.py       |  64 +-
 ee/api/chalicelib/core/errors.py               |  18 +-
 ee/api/chalicelib/core/errors_exp.py           |  82 +-
 ee/api/chalicelib/core/events.py               |  42 +-
 ee/api/chalicelib/core/heatmaps.py             |  46 +-
 ee/api/chalicelib/core/integrations_global.py  |  26 +-
 ee/api/chalicelib/core/metrics.py              |  22 +-
 ee/api/chalicelib/core/product_analytics.py    |  52 +-
 ee/api/chalicelib/core/sessions_devtool.py     |   2 +-
 ee/api/chalicelib/core/sessions_exp.py         | 240 +++---
 ee/api/chalicelib/core/sessions_insights.py    |  18 +-
 ee/api/chalicelib/core/sessions_metas.py       | 114 +--
 ee/api/chalicelib/core/traces.py               |   2 +-
 ee/api/chalicelib/core/webhook.py              |   2 +-
 ee/api/routers/core_dynamic.py                 |  70 +-
 ee/api/routers/subs/insights.py                |   2 +-
 ee/api/routers/subs/metrics.py                 |   2 +-
 ee/api/schemas/schemas_ee.py                   |  33 +-
 50 files changed, 1312 insertions(+), 1325 deletions(-)

diff --git a/api/chalicelib/core/alerts.py b/api/chalicelib/core/alerts.py
index a643c366b2..2f34e6d8e9 100644
--- a/api/chalicelib/core/alerts.py
+++ b/api/chalicelib/core/alerts.py
@@ -229,5 +229,5 @@ def get_predefined_values():
                "unit": "count" if v.endswith(".count") else "ms",
                "predefined": True,
                "metricId": None,
-               "seriesId": None} for v in values if v != schemas.AlertColumn.custom]
+               "seriesId": None} for v in values if v != schemas.AlertColumn.CUSTOM]
     return values
diff --git a/api/chalicelib/core/alerts_processor.py b/api/chalicelib/core/alerts_processor.py
index 4bca78e811..08489c0175 100644
--- a/api/chalicelib/core/alerts_processor.py
+++ b/api/chalicelib/core/alerts_processor.py
@@ -14,60 +14,60 @@ logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO))
 
 LeftToDb = {
-    schemas.AlertColumn.performance__dom_content_loaded__average: {
+    schemas.AlertColumn.PERFORMANCE__DOM_CONTENT_LOADED__AVERAGE: {
         "table": "events.pages INNER JOIN public.sessions USING(session_id)",
         "formula": "COALESCE(AVG(NULLIF(dom_content_loaded_time ,0)),0)"},
-    schemas.AlertColumn.performance__first_meaningful_paint__average: {
+    schemas.AlertColumn.PERFORMANCE__FIRST_MEANINGFUL_PAINT__AVERAGE: {
         "table": "events.pages INNER JOIN public.sessions USING(session_id)",
         "formula": "COALESCE(AVG(NULLIF(first_contentful_paint_time,0)),0)"},
-    schemas.AlertColumn.performance__page_load_time__average: {
+    schemas.AlertColumn.PERFORMANCE__PAGE_LOAD_TIME__AVERAGE: {
         "table": "events.pages INNER JOIN public.sessions USING(session_id)",
         "formula": "AVG(NULLIF(load_time ,0))"},
-    schemas.AlertColumn.performance__dom_build_time__average: {
+    schemas.AlertColumn.PERFORMANCE__DOM_BUILD_TIME__AVERAGE: {
         "table": "events.pages INNER JOIN public.sessions USING(session_id)",
         "formula": "AVG(NULLIF(dom_building_time,0))"},
-    schemas.AlertColumn.performance__speed_index__average: {
+    schemas.AlertColumn.PERFORMANCE__SPEED_INDEX__AVERAGE: {
         "table": "events.pages INNER JOIN public.sessions USING(session_id)",
         "formula": "AVG(NULLIF(speed_index,0))"},
-    schemas.AlertColumn.performance__page_response_time__average: {
+    schemas.AlertColumn.PERFORMANCE__PAGE_RESPONSE_TIME__AVERAGE: {
         "table": "events.pages INNER JOIN public.sessions USING(session_id)",
         "formula": "AVG(NULLIF(response_time,0))"},
-    schemas.AlertColumn.performance__ttfb__average: {
+    schemas.AlertColumn.PERFORMANCE__TTFB__AVERAGE: {
         "table": "events.pages INNER JOIN public.sessions USING(session_id)",
         "formula": "AVG(NULLIF(first_paint_time,0))"},
-    schemas.AlertColumn.performance__time_to_render__average: {
+    schemas.AlertColumn.PERFORMANCE__TIME_TO_RENDER__AVERAGE: {
         "table": "events.pages INNER JOIN public.sessions USING(session_id)",
         "formula": "AVG(NULLIF(visually_complete,0))"},
-    schemas.AlertColumn.performance__image_load_time__average: {
+    schemas.AlertColumn.PERFORMANCE__IMAGE_LOAD_TIME__AVERAGE: {
         "table": "events.resources INNER JOIN public.sessions USING(session_id)",
         "formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='img'"},
-    schemas.AlertColumn.performance__request_load_time__average: {
+    schemas.AlertColumn.PERFORMANCE__REQUEST_LOAD_TIME__AVERAGE: {
         "table": "events.resources INNER JOIN public.sessions USING(session_id)",
         "formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='fetch'"},
-    schemas.AlertColumn.resources__load_time__average: {
+    schemas.AlertColumn.RESOURCES__LOAD_TIME__AVERAGE: {
         "table": "events.resources INNER JOIN public.sessions USING(session_id)",
         "formula": "AVG(NULLIF(resources.duration,0))"},
-    schemas.AlertColumn.resources__missing__count: {
+    schemas.AlertColumn.RESOURCES__MISSING__COUNT: {
         "table": "events.resources INNER JOIN public.sessions USING(session_id)",
         "formula": "COUNT(DISTINCT url_hostpath)", "condition": "success= FALSE AND type='img'"},
-    schemas.AlertColumn.errors__4xx_5xx__count: {
+    schemas.AlertColumn.ERRORS__4XX_5XX__COUNT: {
         "table": "events.resources INNER JOIN public.sessions USING(session_id)",
         "formula": "COUNT(session_id)", "condition": "status/100!=2"},
-    schemas.AlertColumn.errors__4xx__count: {
+    schemas.AlertColumn.ERRORS__4XX__COUNT: {
         "table": "events.resources INNER JOIN public.sessions USING(session_id)",
         "formula": "COUNT(session_id)", "condition": "status/100=4"},
-    schemas.AlertColumn.errors__5xx__count: {
+    schemas.AlertColumn.ERRORS__5XX__COUNT: {
         "table": "events.resources INNER JOIN public.sessions USING(session_id)",
         "formula": "COUNT(session_id)", "condition": "status/100=5"},
-    schemas.AlertColumn.errors__javascript__impacted_sessions__count: {
+    schemas.AlertColumn.ERRORS__JAVASCRIPT__IMPACTED_SESSIONS__COUNT: {
         "table": "events.resources INNER JOIN public.sessions USING(session_id)",
         "formula": "COUNT(DISTINCT session_id)", "condition": "success= FALSE AND type='script'"},
-    schemas.AlertColumn.performance__crashes__count: {
+    schemas.AlertColumn.PERFORMANCE__CRASHES__COUNT: {
         "table": "public.sessions",
         "formula": "COUNT(DISTINCT session_id)", "condition": "errors_count > 0 AND duration>0"},
-    schemas.AlertColumn.errors__javascript__count: {
+    schemas.AlertColumn.ERRORS__JAVASCRIPT__COUNT: {
         "table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)",
         "formula": "COUNT(DISTINCT session_id)", "condition": "source='js_exception'", "joinSessions": False},
-    schemas.AlertColumn.errors__backend__count: {
+    schemas.AlertColumn.ERRORS__BACKEND__COUNT: {
         "table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)",
         "formula": "COUNT(DISTINCT session_id)", "condition": "source!='js_exception'", "joinSessions": False},
 }
@@ -87,7 +87,7 @@ def can_check(a) -> bool:
     now = TimeUTC.now()
 
     repetitionBase = a["options"]["currentPeriod"] \
-        if a["detectionMethod"] == schemas.AlertDetectionMethod.change \
+        if a["detectionMethod"] == schemas.AlertDetectionMethod.CHANGE \
            and a["options"]["currentPeriod"] > a["options"]["previousPeriod"] \
         else a["options"]["previousPeriod"]
 
@@ -110,7 +110,7 @@ def Build(a):
     main_table = ""
     if a["seriesId"] is not None:
         a["filter"]["sort"] = "session_id"
-        a["filter"]["order"] = schemas.SortOrderType.desc
+        a["filter"]["order"] = schemas.SortOrderType.DESC
         a["filter"]["startDate"] = 0
         a["filter"]["endDate"] = TimeUTC.now()
         try:
@@ -136,7 +136,7 @@ def Build(a):
     is_ss = main_table == "public.sessions"
     q = f"""SELECT coalesce(value,0) AS value, coalesce(value,0) {a["query"]["operator"]} {a["query"]["right"]} AS valid"""
 
-    if a["detectionMethod"] == schemas.AlertDetectionMethod.threshold:
+    if a["detectionMethod"] == schemas.AlertDetectionMethod.THRESHOLD:
         if a["seriesId"] is not None:
             q += f""" FROM ({subQ}) AS stat"""
         else:
@@ -144,7 +144,7 @@ def Build(a):
                         {"AND start_ts >= %(startDate)s AND start_ts <= %(now)s" if j_s else ""}) AS stat"""
         params = {**params, **full_args, "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000}
     else:
-        if a["change"] == schemas.AlertDetectionType.change:
+        if a["change"] == schemas.AlertDetectionType.CHANGE:
             if a["seriesId"] is not None:
                 sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s")
                 sub1 = f"SELECT (({subQ})-({sub2})) AS value"
diff --git a/api/chalicelib/core/assist.py b/api/chalicelib/core/assist.py
index 7534eb5ecb..a47940f9f9 100644
--- a/api/chalicelib/core/assist.py
+++ b/api/chalicelib/core/assist.py
@@ -52,7 +52,7 @@ def get_live_sessions_ws(project_id, body: schemas.LiveSessionsSearchPayloadSche
         "sort": {"key": body.sort, "order": body.order}
     }
     for f in body.filters:
-        if f.type == schemas.LiveFilterType.metadata:
+        if f.type == schemas.LiveFilterType.METADATA:
             data["filter"][f.source] = {"values": f.value, "operator": f.operator}
 
         else:
@@ -281,23 +281,23 @@ def session_exists(project_id, session_id):
 
 def __change_keys(key):
     return {
-        "PAGETITLE": schemas.LiveFilterType.page_title.value,
+        "PAGETITLE": schemas.LiveFilterType.PAGE_TITLE.value,
         "ACTIVE": "active",
         "LIVE": "live",
-        "SESSIONID": schemas.LiveFilterType.session_id.value,
-        "METADATA": schemas.LiveFilterType.metadata.value,
-        "USERID": schemas.LiveFilterType.user_id.value,
-        "USERUUID": schemas.LiveFilterType.user_UUID.value,
+        "SESSIONID": schemas.LiveFilterType.SESSION_ID.value,
+        "METADATA": schemas.LiveFilterType.METADATA.value,
+        "USERID": schemas.LiveFilterType.USER_ID.value,
+        "USERUUID": schemas.LiveFilterType.USER_UUID.value,
         "PROJECTKEY": "projectKey",
-        "REVID": schemas.LiveFilterType.rev_id.value,
+        "REVID": schemas.LiveFilterType.REV_ID.value,
         "TIMESTAMP": "timestamp",
-        "TRACKERVERSION": schemas.LiveFilterType.tracker_version.value,
+        "TRACKERVERSION": schemas.LiveFilterType.TRACKER_VERSION.value,
         "ISSNIPPET": "isSnippet",
-        "USEROS": schemas.LiveFilterType.user_os.value,
-        "USERBROWSER": schemas.LiveFilterType.user_browser.value,
-        "USERBROWSERVERSION": schemas.LiveFilterType.user_browser_version.value,
-        "USERDEVICE": schemas.LiveFilterType.user_device.value,
-        "USERDEVICETYPE": schemas.LiveFilterType.user_device_type.value,
-        "USERCOUNTRY": schemas.LiveFilterType.user_country.value,
+        "USEROS": schemas.LiveFilterType.USER_OS.value,
+        "USERBROWSER": schemas.LiveFilterType.USER_BROWSER.value,
+        "USERBROWSERVERSION": schemas.LiveFilterType.USER_BROWSER_VERSION.value,
+        "USERDEVICE": schemas.LiveFilterType.USER_DEVICE.value,
+        "USERDEVICETYPE": schemas.LiveFilterType.USER_DEVICE_TYPE.value,
+        "USERCOUNTRY": schemas.LiveFilterType.USER_COUNTRY.value,
         "PROJECTID": "projectId"
     }.get(key.upper(), key)
diff --git a/api/chalicelib/core/autocomplete.py b/api/chalicelib/core/autocomplete.py
index fec9c4d4db..61262d1bf2 100644
--- a/api/chalicelib/core/autocomplete.py
+++ b/api/chalicelib/core/autocomplete.py
@@ -8,23 +8,23 @@
 
 
 def __get_autocomplete_table(value, project_id):
-    autocomplete_events = [schemas.FilterType.rev_id,
-                           schemas.EventType.click,
-                           schemas.FilterType.user_device,
-                           schemas.FilterType.user_id,
-                           schemas.FilterType.user_browser,
-                           schemas.FilterType.user_os,
-                           schemas.EventType.custom,
-                           schemas.FilterType.user_country,
-                           schemas.FilterType.user_city,
-                           schemas.FilterType.user_state,
-                           schemas.EventType.location,
-                           schemas.EventType.input]
+    autocomplete_events = [schemas.FilterType.REV_ID,
+                           schemas.EventType.CLICK,
+                           schemas.FilterType.USER_DEVICE,
+                           schemas.FilterType.USER_ID,
+                           schemas.FilterType.USER_BROWSER,
+                           schemas.FilterType.USER_OS,
+                           schemas.EventType.CUSTOM,
+                           schemas.FilterType.USER_COUNTRY,
+                           schemas.FilterType.USER_CITY,
+                           schemas.FilterType.USER_STATE,
+                           schemas.EventType.LOCATION,
+                           schemas.EventType.INPUT]
     autocomplete_events.sort()
     sub_queries = []
     c_list = []
     for e in autocomplete_events:
-        if e == schemas.FilterType.user_country:
+        if e == schemas.FilterType.USER_COUNTRY:
             c_list = countries.get_country_code_autocomplete(value)
             if len(c_list) > 0:
                 sub_queries.append(f"""(SELECT DISTINCT ON(value) '{e.value}' AS _type, value
@@ -72,7 +72,7 @@ def __get_autocomplete_table(value, project_id):
 
 
 def __generic_query(typename, value_length=None):
-    if typename == schemas.FilterType.user_country:
+    if typename == schemas.FilterType.USER_COUNTRY:
         return f"""SELECT DISTINCT value, type
                    FROM {TABLE}
                    WHERE
@@ -127,7 +127,7 @@ def f(project_id, text):
             params = {"project_id": project_id,
                       "value": helper.string_to_sql_like(text),
                       "svalue": helper.string_to_sql_like("^" + text)}
-            if typename == schemas.FilterType.user_country:
+            if typename == schemas.FilterType.USER_COUNTRY:
                 params["value"] = tuple(countries.get_country_code_autocomplete(text))
                 if len(params["value"]) == 0:
                     return []
diff --git a/api/chalicelib/core/collaboration_msteams.py b/api/chalicelib/core/collaboration_msteams.py
index 289da889ae..7034685f61 100644
--- a/api/chalicelib/core/collaboration_msteams.py
+++ b/api/chalicelib/core/collaboration_msteams.py
@@ -15,12 +15,12 @@ class MSTeams(BaseCollaboration):
     @classmethod
     def add(cls, tenant_id, data: schemas.AddCollaborationSchema):
         if webhook.exists_by_name(tenant_id=tenant_id, name=data.name, exclude_id=None,
-                                  webhook_type=schemas.WebhookType.msteams):
+                                  webhook_type=schemas.WebhookType.MSTEAMS):
             raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
         if cls.say_hello(data.url):
             return webhook.add(tenant_id=tenant_id,
                                endpoint=data.url.unicode_string(),
-                               webhook_type=schemas.WebhookType.msteams,
+                               webhook_type=schemas.WebhookType.MSTEAMS,
                                name=data.name)
         return None
 
@@ -157,9 +157,9 @@ def share_error(cls, tenant_id, project_id, error_id, user, comment, project_nam
     def get_integration(cls, tenant_id, integration_id=None):
         if integration_id is not None:
             return webhook.get_webhook(tenant_id=tenant_id, webhook_id=integration_id,
-                                       webhook_type=schemas.WebhookType.msteams)
+                                       webhook_type=schemas.WebhookType.MSTEAMS)
 
-        integrations = webhook.get_by_type(tenant_id=tenant_id, webhook_type=schemas.WebhookType.msteams)
+        integrations = webhook.get_by_type(tenant_id=tenant_id, webhook_type=schemas.WebhookType.MSTEAMS)
         if integrations is None or len(integrations) == 0:
             return None
         return integrations[0]
diff --git a/api/chalicelib/core/collaboration_slack.py b/api/chalicelib/core/collaboration_slack.py
index 551068df54..42cb883231 100644
--- a/api/chalicelib/core/collaboration_slack.py
+++ b/api/chalicelib/core/collaboration_slack.py
@@ -13,12 +13,12 @@ class Slack(BaseCollaboration):
     @classmethod
     def add(cls, tenant_id, data: schemas.AddCollaborationSchema):
         if webhook.exists_by_name(tenant_id=tenant_id, name=data.name, exclude_id=None,
-                                  webhook_type=schemas.WebhookType.slack):
+                                  webhook_type=schemas.WebhookType.SLACK):
             raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=f"name already exists.")
         if cls.say_hello(data.url):
             return webhook.add(tenant_id=tenant_id,
                                endpoint=data.url.unicode_string(),
-                               webhook_type=schemas.WebhookType.slack,
+                               webhook_type=schemas.WebhookType.SLACK,
                                name=data.name)
         return None
 
@@ -118,9 +118,9 @@ def share_error(cls, tenant_id, project_id, error_id, user, comment, project_nam
     def get_integration(cls, tenant_id, integration_id=None):
         if integration_id is not None:
             return webhook.get_webhook(tenant_id=tenant_id, webhook_id=integration_id,
-                                       webhook_type=schemas.WebhookType.slack)
+                                       webhook_type=schemas.WebhookType.SLACK)
 
-        integrations = webhook.get_by_type(tenant_id=tenant_id, webhook_type=schemas.WebhookType.slack)
+        integrations = webhook.get_by_type(tenant_id=tenant_id, webhook_type=schemas.WebhookType.SLACK)
         if integrations is None or len(integrations) == 0:
             return None
         return integrations[0]
diff --git a/api/chalicelib/core/custom_metrics.py b/api/chalicelib/core/custom_metrics.py
index 5256facfd6..e26e480201 100644
--- a/api/chalicelib/core/custom_metrics.py
+++ b/api/chalicelib/core/custom_metrics.py
@@ -139,14 +139,14 @@ def __get_table_of_urls(project_id: int, data: schemas.CardTable, user_id: int =
 
 def __get_table_chart(project_id: int, data: schemas.CardTable, user_id: int):
     supported = {
-        schemas.MetricOfTable.sessions: __get_table_of_sessions,
-        schemas.MetricOfTable.errors: __get_table_of_errors,
-        schemas.MetricOfTable.user_id: __get_table_of_user_ids,
-        schemas.MetricOfTable.issues: __get_table_of_issues,
-        schemas.MetricOfTable.user_browser: __get_table_of_browsers,
-        schemas.MetricOfTable.user_device: __get_table_of_devises,
-        schemas.MetricOfTable.user_country: __get_table_of_countries,
-        schemas.MetricOfTable.visited_url: __get_table_of_urls,
+        schemas.MetricOfTable.SESSIONS: __get_table_of_sessions,
+        schemas.MetricOfTable.ERRORS: __get_table_of_errors,
+        schemas.MetricOfTable.USER_ID: __get_table_of_user_ids,
+        schemas.MetricOfTable.ISSUES: __get_table_of_issues,
+        schemas.MetricOfTable.USER_BROWSER: __get_table_of_browsers,
+        schemas.MetricOfTable.USER_DEVICE: __get_table_of_devises,
+        schemas.MetricOfTable.USER_COUNTRY: __get_table_of_countries,
+        schemas.MetricOfTable.VISITED_URL: __get_table_of_urls,
     }
     return supported.get(data.metric_of, not_supported)(project_id=project_id, data=data, user_id=user_id)
 
@@ -158,12 +158,12 @@ def get_chart(project_id: int, data: schemas.CardSchema, user_id: int):
                                                   data=data.model_dump())
 
     supported = {
-        schemas.MetricType.timeseries: __get_timeseries_chart,
-        schemas.MetricType.table: __get_table_chart,
-        schemas.MetricType.heat_map: __get_heat_map_chart,
-        schemas.MetricType.funnel: __get_funnel_chart,
-        schemas.MetricType.insights: not_supported,
-        schemas.MetricType.pathAnalysis: __get_path_analysis_chart
+        schemas.MetricType.TIMESERIES: __get_timeseries_chart,
+        schemas.MetricType.TABLE: __get_table_chart,
+        schemas.MetricType.HEAT_MAP: __get_heat_map_chart,
+        schemas.MetricType.FUNNEL: __get_funnel_chart,
+        schemas.MetricType.INSIGHTS: not_supported,
+        schemas.MetricType.PATH_ANALYSIS: __get_path_analysis_chart
     }
     return supported.get(data.metric_type, not_supported)(project_id=project_id, data=data, user_id=user_id)
 
@@ -273,18 +273,18 @@ def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.Card
         filters=filters
     )
     # ---- To make issues response close to the chart response
-    search_data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.events_count,
-                                                                 operator=schemas.MathOperator._greater,
+    search_data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
+                                                                 operator=schemas.MathOperator.GREATER,
                                                                  value=[1]))
     if len(data.start_point) == 0:
-        search_data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.location,
-                                                                    operator=schemas.SearchEventOperator._is_any,
+        search_data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
+                                                                    operator=schemas.SearchEventOperator.IS_ANY,
                                                                     value=[]))
     # ---- End
     for s in data.excludes:
         search_data.events.append(schemas.SessionSearchEventSchema2(type=s.type,
-                                                                    operator=schemas.SearchEventOperator._not_on,
+                                                                    operator=schemas.SearchEventOperator.NOT_ON,
                                                                     value=s.value))
     result = sessions.search_table_of_individual_issues(project_id=project_id, data=search_data)
     return result
@@ -293,15 +293,15 @@ def get_issues(project_id: int, user_id: int, data: schemas.CardSchema):
     if data.is_predefined:
         return not_supported()
-    if data.metric_of == schemas.MetricOfTable.issues:
+    if data.metric_of == schemas.MetricOfTable.ISSUES:
         return __get_table_of_issues(project_id=project_id, user_id=user_id, data=data)
     supported = {
-        schemas.MetricType.timeseries: not_supported,
-        schemas.MetricType.table: not_supported,
-        schemas.MetricType.heat_map: not_supported,
-        schemas.MetricType.funnel: __get_funnel_issues,
-        schemas.MetricType.insights: not_supported,
-        schemas.MetricType.pathAnalysis: __get_path_analysis_issues,
+        schemas.MetricType.TIMESERIES: not_supported,
+        schemas.MetricType.TABLE: not_supported,
+        schemas.MetricType.HEAT_MAP: not_supported,
+        schemas.MetricType.FUNNEL: __get_funnel_issues,
+        schemas.MetricType.INSIGHTS: not_supported,
+        schemas.MetricType.PATH_ANALYSIS: __get_path_analysis_issues,
     }
     return supported.get(data.metric_type, not_supported)(project_id=project_id, data=data, user_id=user_id)
 
@@ -317,7 +317,7 @@ def __get_path_analysis_card_info(data: schemas.CardPathAnalysis):
 def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
     with pg_client.PostgresClient() as cur:
         session_data = None
-        if data.metric_type == schemas.MetricType.heat_map:
+        if data.metric_type == schemas.MetricType.HEAT_MAP:
             if data.session_id is not None:
                 session_data = {"sessionId": data.session_id}
             else:
@@ -336,7 +336,7 @@ def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False):
         params = {"user_id": user_id, "project_id": project_id, **data.model_dump(), **_data}
         params["default_config"] = json.dumps(data.default_config.model_dump())
         params["card_info"] = None
-        if data.metric_type == schemas.MetricType.pathAnalysis:
+        if data.metric_type == schemas.MetricType.PATH_ANALYSIS:
             params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data))
 
         query = """INSERT INTO metrics (project_id, user_id, name, is_public,
@@ -399,9 +399,9 @@ def update_card(metric_id, user_id, project_id, data: schemas.CardSchema):
     params["d_series_ids"] = tuple(d_series_ids)
     params["card_info"] = None
     params["session_data"] = json.dumps(metric["data"])
-    if data.metric_type == schemas.MetricType.pathAnalysis:
+    if data.metric_type == schemas.MetricType.PATH_ANALYSIS:
         params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data))
-    elif data.metric_type == schemas.MetricType.heat_map:
+    elif data.metric_type == schemas.MetricType.HEAT_MAP:
         if data.session_id is not None:
             params["session_data"] = json.dumps({"sessionId": data.session_id})
         elif metric.get("data") and metric["data"].get("sessionId"):
@@ -465,7 +465,7 @@ def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_ser
     if data.query is not None and len(data.query) > 0:
         constraints.append("(name ILIKE %(query)s OR owner.owner_email ILIKE %(query)s)")
         params["query"] = helper.values_for_operator(value=data.query,
                                                      op=schemas.SearchEventOperator._contains)
+                                                     op=schemas.SearchEventOperator.CONTAINS)
     with pg_client.PostgresClient() as cur:
         sub_join = ""
         if include_series:
@@ -592,7 +592,7 @@ def get_card(metric_id, project_id, user_id, flatten: bool = True, include_data:
         for s in row["series"]:
             s["filter"] = helper.old_search_payload_to_flat(s["filter"])
     row = helper.dict_to_camel_case(row)
-    if row["metricType"] == schemas.MetricType.pathAnalysis:
+    if row["metricType"] == schemas.MetricType.PATH_ANALYSIS:
         row = __get_path_analysis_attributes(row=row)
     return row
 
@@ -691,7 +691,7 @@ def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardSessi
             return custom_metrics_predefined.get_metric(key=metric.metric_of,
                                                         project_id=project_id,
                                                         data=data.model_dump())
-    elif metric.metric_type == schemas.MetricType.heat_map:
+    elif metric.metric_type == schemas.MetricType.HEAT_MAP:
         if raw_metric["data"] and raw_metric["data"].get("sessionId"):
             return heatmaps.get_selected_session(project_id=project_id,
                                                  session_id=raw_metric["data"]["sessionId"])
diff --git a/api/chalicelib/core/custom_metrics_predefined.py b/api/chalicelib/core/custom_metrics_predefined.py
index 49b984fe12..3236d566fb 100644
--- a/api/chalicelib/core/custom_metrics_predefined.py
+++ b/api/chalicelib/core/custom_metrics_predefined.py
@@ -12,51 +12,51 @@ def get_metric(key: Union[schemas.MetricOfWebVitals, schemas.MetricOfErrors, \
                           schemas.MetricOfPerformance, schemas.MetricOfResources],
                project_id: int, data: dict):
-    supported = {schemas.MetricOfWebVitals.count_sessions: metrics.get_processed_sessions,
-                 schemas.MetricOfWebVitals.avg_image_load_time: metrics.get_application_activity_avg_image_load_time,
-                 schemas.MetricOfWebVitals.avg_page_load_time: metrics.get_application_activity_avg_page_load_time,
-                 schemas.MetricOfWebVitals.avg_request_load_time: metrics.get_application_activity_avg_request_load_time,
-                 schemas.MetricOfWebVitals.avg_dom_content_load_start: metrics.get_page_metrics_avg_dom_content_load_start,
-                 schemas.MetricOfWebVitals.avg_first_contentful_pixel: metrics.get_page_metrics_avg_first_contentful_pixel,
-                 schemas.MetricOfWebVitals.avg_visited_pages: metrics.get_user_activity_avg_visited_pages,
-                 schemas.MetricOfWebVitals.avg_session_duration: metrics.get_user_activity_avg_session_duration,
-                 schemas.MetricOfWebVitals.avg_pages_dom_buildtime: metrics.get_pages_dom_build_time,
-                 schemas.MetricOfWebVitals.avg_pages_response_time: metrics.get_pages_response_time,
-                 schemas.MetricOfWebVitals.avg_response_time: metrics.get_top_metrics_avg_response_time,
-                 schemas.MetricOfWebVitals.avg_first_paint: metrics.get_top_metrics_avg_first_paint,
-                 schemas.MetricOfWebVitals.avg_dom_content_loaded: metrics.get_top_metrics_avg_dom_content_loaded,
-                 schemas.MetricOfWebVitals.avg_till_first_byte: metrics.get_top_metrics_avg_till_first_bit,
-                 schemas.MetricOfWebVitals.avg_time_to_interactive: metrics.get_top_metrics_avg_time_to_interactive,
-                 schemas.MetricOfWebVitals.count_requests: metrics.get_top_metrics_count_requests,
-                 schemas.MetricOfWebVitals.avg_time_to_render: metrics.get_time_to_render,
-                 schemas.MetricOfWebVitals.avg_used_js_heap_size: metrics.get_memory_consumption,
+    supported = {schemas.MetricOfWebVitals.COUNT_SESSIONS: metrics.get_processed_sessions,
+                 schemas.MetricOfWebVitals.AVG_IMAGE_LOAD_TIME: metrics.get_application_activity_avg_image_load_time,
+                 schemas.MetricOfWebVitals.AVG_PAGE_LOAD_TIME: metrics.get_application_activity_avg_page_load_time,
+                 schemas.MetricOfWebVitals.AVG_REQUEST_LOAD_TIME: metrics.get_application_activity_avg_request_load_time,
+                 schemas.MetricOfWebVitals.AVG_DOM_CONTENT_LOAD_START: metrics.get_page_metrics_avg_dom_content_load_start,
+                 schemas.MetricOfWebVitals.AVG_FIRST_CONTENTFUL_PIXEL: metrics.get_page_metrics_avg_first_contentful_pixel,
+                 schemas.MetricOfWebVitals.AVG_VISITED_PAGES: metrics.get_user_activity_avg_visited_pages,
+                 schemas.MetricOfWebVitals.AVG_SESSION_DURATION: metrics.get_user_activity_avg_session_duration,
+                 schemas.MetricOfWebVitals.AVG_PAGES_DOM_BUILDTIME: metrics.get_pages_dom_build_time,
+                 schemas.MetricOfWebVitals.AVG_PAGES_RESPONSE_TIME: metrics.get_pages_response_time,
+                 schemas.MetricOfWebVitals.AVG_RESPONSE_TIME: metrics.get_top_metrics_avg_response_time,
+                 schemas.MetricOfWebVitals.AVG_FIRST_PAINT: metrics.get_top_metrics_avg_first_paint,
+                 schemas.MetricOfWebVitals.AVG_DOM_CONTENT_LOADED: metrics.get_top_metrics_avg_dom_content_loaded,
+                 schemas.MetricOfWebVitals.AVG_TILL_FIRST_BYTE: metrics.get_top_metrics_avg_till_first_bit,
+                 schemas.MetricOfWebVitals.AVG_TIME_TO_INTERACTIVE: metrics.get_top_metrics_avg_time_to_interactive,
+                 schemas.MetricOfWebVitals.COUNT_REQUESTS: metrics.get_top_metrics_count_requests,
+                 schemas.MetricOfWebVitals.AVG_TIME_TO_RENDER: metrics.get_time_to_render,
+                 schemas.MetricOfWebVitals.AVG_USED_JS_HEAP_SIZE: metrics.get_memory_consumption,
                  schemas.MetricOfWebVitals.avg_cpu: metrics.get_avg_cpu,
-                 schemas.MetricOfWebVitals.avg_fps: metrics.get_avg_fps,
-                 schemas.MetricOfErrors.impacted_sessions_by_js_errors: metrics.get_impacted_sessions_by_js_errors,
-                 schemas.MetricOfErrors.domains_errors_4xx: metrics.get_domains_errors_4xx,
-                 schemas.MetricOfErrors.domains_errors_5xx: metrics.get_domains_errors_5xx,
-                 schemas.MetricOfErrors.errors_per_domains: metrics.get_errors_per_domains,
-                 schemas.MetricOfErrors.calls_errors: metrics.get_calls_errors,
-                 schemas.MetricOfErrors.errors_per_type: metrics.get_errors_per_type,
-                 schemas.MetricOfErrors.resources_by_party: metrics.get_resources_by_party,
-                 schemas.MetricOfPerformance.speed_location: metrics.get_speed_index_location,
-                 schemas.MetricOfPerformance.slowest_domains: metrics.get_slowest_domains,
-                 schemas.MetricOfPerformance.sessions_per_browser: metrics.get_sessions_per_browser,
-                 schemas.MetricOfPerformance.time_to_render: metrics.get_time_to_render,
-                 schemas.MetricOfPerformance.impacted_sessions_by_slow_pages: metrics.get_impacted_sessions_by_slow_pages,
-                 schemas.MetricOfPerformance.memory_consumption: metrics.get_memory_consumption,
-                 schemas.MetricOfPerformance.cpu: metrics.get_avg_cpu,
-                 schemas.MetricOfPerformance.fps: metrics.get_avg_fps,
-                 schemas.MetricOfPerformance.crashes: metrics.get_crashes,
-                 schemas.MetricOfPerformance.resources_vs_visually_complete: metrics.get_resources_vs_visually_complete,
-                 schemas.MetricOfPerformance.pages_dom_buildtime: metrics.get_pages_dom_build_time,
-                 schemas.MetricOfPerformance.pages_response_time: metrics.get_pages_response_time,
-                 schemas.MetricOfPerformance.pages_response_time_distribution: metrics.get_pages_response_time_distribution,
-                 schemas.MetricOfResources.missing_resources: metrics.get_missing_resources_trend,
-                 schemas.MetricOfResources.slowest_resources: metrics.get_slowest_resources,
-                 schemas.MetricOfResources.resources_loading_time: metrics.get_resources_loading_time,
-                 schemas.MetricOfResources.resource_type_vs_response_end: metrics.resource_type_vs_response_end,
-                 schemas.MetricOfResources.resources_count_by_type: metrics.get_resources_count_by_type,
-                 schemas.MetricOfWebVitals.count_users: metrics.get_unique_users,}
+                 schemas.MetricOfWebVitals.AVG_FPS: metrics.get_avg_fps,
+                 schemas.MetricOfErrors.IMPACTED_SESSIONS_BY_JS_ERRORS: metrics.get_impacted_sessions_by_js_errors,
+                 schemas.MetricOfErrors.DOMAINS_ERRORS_4XX: metrics.get_domains_errors_4xx,
+                 schemas.MetricOfErrors.DOMAINS_ERRORS_5XX: metrics.get_domains_errors_5xx,
+                 schemas.MetricOfErrors.ERRORS_PER_DOMAINS: metrics.get_errors_per_domains,
+                 schemas.MetricOfErrors.CALLS_ERRORS: metrics.get_calls_errors,
+                 schemas.MetricOfErrors.ERRORS_PER_TYPE: metrics.get_errors_per_type,
+                 schemas.MetricOfErrors.RESOURCES_BY_PARTY: metrics.get_resources_by_party,
+                 schemas.MetricOfPerformance.SPEED_LOCATION: metrics.get_speed_index_location,
+                 schemas.MetricOfPerformance.SLOWEST_DOMAINS: metrics.get_slowest_domains,
+                 schemas.MetricOfPerformance.SESSIONS_PER_BROWSER: metrics.get_sessions_per_browser,
+                 schemas.MetricOfPerformance.TIME_TO_RENDER: metrics.get_time_to_render,
+                 schemas.MetricOfPerformance.IMPACTED_SESSIONS_BY_SLOW_PAGES: metrics.get_impacted_sessions_by_slow_pages,
+                 schemas.MetricOfPerformance.MEMORY_CONSUMPTION: metrics.get_memory_consumption,
+                 schemas.MetricOfPerformance.CPU: metrics.get_avg_cpu,
+                 schemas.MetricOfPerformance.FPS: metrics.get_avg_fps,
+                 schemas.MetricOfPerformance.CRASHES: metrics.get_crashes,
+                 schemas.MetricOfPerformance.RESOURCES_VS_VISUALLY_COMPLETE: metrics.get_resources_vs_visually_complete,
+                 schemas.MetricOfPerformance.PAGES_DOM_BUILDTIME: metrics.get_pages_dom_build_time,
+                 schemas.MetricOfPerformance.PAGES_RESPONSE_TIME: metrics.get_pages_response_time,
+                 schemas.MetricOfPerformance.PAGES_RESPONSE_TIME_DISTRIBUTION: metrics.get_pages_response_time_distribution,
+                 schemas.MetricOfResources.MISSING_RESOURCES: metrics.get_missing_resources_trend,
+                 schemas.MetricOfResources.SLOWEST_RESOURCES: metrics.get_slowest_resources,
+                 schemas.MetricOfResources.RESOURCES_LOADING_TIME: metrics.get_resources_loading_time,
+                 schemas.MetricOfResources.RESOURCE_TYPE_VS_RESPONSE_END: metrics.resource_type_vs_response_end,
+                 schemas.MetricOfResources.RESOURCES_COUNT_BY_TYPE: metrics.get_resources_count_by_type,
+                 schemas.MetricOfWebVitals.COUNT_USERS: metrics.get_unique_users, }
 
     return supported.get(key, lambda *args: None)(project_id=project_id, **data)
diff --git a/api/chalicelib/core/errors.py b/api/chalicelib/core/errors.py
index fb1292012b..6d5c5f7df7 100644
--- a/api/chalicelib/core/errors.py
+++ b/api/chalicelib/core/errors.py
@@ -420,18 +420,18 @@ def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_n
     if chart:
         ch_sub_query += [f"timestamp >= generated_timestamp",
                          f"timestamp < generated_timestamp + %({step_size_name})s"]
-    if platform == schemas.PlatformType.mobile:
+    if platform == schemas.PlatformType.MOBILE:
         ch_sub_query.append("user_device_type = 'mobile'")
-    elif platform == schemas.PlatformType.desktop:
+    elif platform == schemas.PlatformType.DESKTOP:
         ch_sub_query.append("user_device_type = 'desktop'")
     return ch_sub_query
 
 
 def __get_sort_key(key):
     return {
-        schemas.ErrorSort.occurrence: "max_datetime",
-        schemas.ErrorSort.users_count: "users",
-        schemas.ErrorSort.sessions_count: "sessions"
+        schemas.ErrorSort.OCCURRENCE: "max_datetime",
+        schemas.ErrorSort.USERS_COUNT: "users",
+        schemas.ErrorSort.SESSIONS_COUNT: "sessions"
     }.get(key, 'max_datetime')
 
 
@@ -443,7 +443,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
     platform = None
     for f in data.filters:
-        if f.type == schemas.FilterType.platform and len(f.value) > 0:
+        if f.type == schemas.FilterType.PLATFORM and len(f.value) > 0:
             platform = f.value[0]
     pg_sub_query = __get_basic_constraints(platform, project_key="sessions.project_id")
     pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source ='js_exception'",
@@ -472,7 +472,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
     sort = __get_sort_key('datetime')
     if data.sort is not None:
         sort = __get_sort_key(data.sort)
-    order = schemas.SortOrderType.desc
+    order = schemas.SortOrderType.DESC
     if data.order is not None:
         order = data.order
     extra_join = ""
@@ -483,7 +483,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
               "project_id": project_id,
               "userId": user_id,
               "step_size": step_size}
-    if data.status != schemas.ErrorStatus.all:
+    if data.status != schemas.ErrorStatus.ALL:
         pg_sub_query.append("status = %(error_status)s")
         params["error_status"] = data.status
     if data.limit is not None and data.page is not None:
@@ -502,7 +502,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id):
     if data.query is not None and len(data.query) > 0:
         pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)")
         params["error_query"] = helper.values_for_operator(value=data.query,
-                                                           op=schemas.SearchEventOperator._contains)
+                                                           op=schemas.SearchEventOperator.CONTAINS)
 
     main_pg_query = f"""SELECT full_count,
                                error_id,
diff --git a/api/chalicelib/core/events.py b/api/chalicelib/core/events.py
index 7299a1713d..412d031079 100644
--- a/api/chalicelib/core/events.py
+++ b/api/chalicelib/core/events.py
@@ -55,7 +55,7 @@ def __get_grouped_clickrage(rows, session_id, project_id):
 def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None):
     with pg_client.PostgresClient() as cur:
         rows = []
-        if event_type is None or event_type == schemas.EventType.click:
+        if event_type is None or event_type == schemas.EventType.CLICK:
             cur.execute(cur.mogrify("""\
                 SELECT
                     c.*,
@@ -69,7 +69,7 @@ def get_by_session_id(session_id, project_id, group_clickrage=False, event_type:
             rows += cur.fetchall()
             if group_clickrage:
                 rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id)
-        if event_type is None or event_type == schemas.EventType.input:
+        if event_type is None or event_type == schemas.EventType.INPUT:
             cur.execute(cur.mogrify("""
                 SELECT
                     i.*,
@@ -81,7 +81,7 @@ def get_by_session_id(session_id, project_id, group_clickrage=False, event_type:
                                     {"project_id": project_id, "session_id": session_id})
                         )
             rows += cur.fetchall()
-        if event_type is None or event_type == schemas.EventType.location:
+        if event_type is None or event_type == schemas.EventType.LOCATION:
             cur.execute(cur.mogrify("""\
                 SELECT
                     l.*,
@@ -115,26 +115,26 @@ def _search_tags(project_id, value, key=None, source=None):
 
 class EventType:
-    CLICK = Event(ui_type=schemas.EventType.click, table="events.clicks", column="label")
-    INPUT = Event(ui_type=schemas.EventType.input, table="events.inputs", column="label")
-    LOCATION = Event(ui_type=schemas.EventType.location, table="events.pages", column="path")
-    CUSTOM = Event(ui_type=schemas.EventType.custom, table="events_common.customs", column="name")
-    REQUEST = Event(ui_type=schemas.EventType.request, table="events_common.requests", column="path")
-    GRAPHQL = Event(ui_type=schemas.EventType.graphql, table="events.graphql", column="name")
-    STATEACTION = Event(ui_type=schemas.EventType.state_action, table="events.state_actions", column="name")
-    TAG = Event(ui_type=schemas.EventType.tag, table="events.tags", column="tag_id")
-    ERROR = Event(ui_type=schemas.EventType.error, table="events.errors",
-                  column=None)  # column=None because errors are searched by name or message
-    METADATA = Event(ui_type=schemas.FilterType.metadata, table="public.sessions", column=None)
+    CLICK = Event(ui_type=schemas.EventType.CLICK, table="events.clicks", column="label")
+    INPUT = Event(ui_type=schemas.EventType.INPUT, table="events.inputs", column="label")
+    LOCATION = Event(ui_type=schemas.EventType.LOCATION, table="events.pages", column="path")
+    CUSTOM = Event(ui_type=schemas.EventType.CUSTOM, table="events_common.customs", column="name")
+    REQUEST = Event(ui_type=schemas.EventType.REQUEST, table="events_common.requests", column="path")
+    GRAPHQL = Event(ui_type=schemas.EventType.GRAPHQL, table="events.graphql", column="name")
+    STATEACTION = Event(ui_type=schemas.EventType.STATE_ACTION, table="events.state_actions", column="name")
+    TAG = Event(ui_type=schemas.EventType.TAG, table="events.tags", column="tag_id")
+    ERROR = Event(ui_type=schemas.EventType.ERROR, table="events.errors",
+                  column=None)  # column=None because errors are searched by name or message
+    METADATA = Event(ui_type=schemas.FilterType.METADATA, table="public.sessions", column=None)
 
     # MOBILE
-    CLICK_MOBILE = Event(ui_type=schemas.EventType.click_mobile, table="events_ios.taps", column="label")
-    INPUT_MOBILE = Event(ui_type=schemas.EventType.input_mobile, table="events_ios.inputs", column="label")
-    VIEW_MOBILE = Event(ui_type=schemas.EventType.view_mobile, table="events_ios.views", column="name")
-    SWIPE_MOBILE = Event(ui_type=schemas.EventType.swipe_mobile, table="events_ios.swipes", column="label")
-    CUSTOM_MOBILE = Event(ui_type=schemas.EventType.custom_mobile, table="events_common.customs", column="name")
-    REQUEST_MOBILE = Event(ui_type=schemas.EventType.request_mobile, table="events_common.requests", column="path")
-    CRASH_MOBILE = Event(ui_type=schemas.EventType.error_mobile, table="events_common.crashes",
-                         column=None)  # column=None because errors are searched by name or message
+    CLICK_MOBILE = Event(ui_type=schemas.EventType.CLICK_MOBILE, table="events_ios.taps", column="label")
+    INPUT_MOBILE = Event(ui_type=schemas.EventType.INPUT_MOBILE, table="events_ios.inputs", column="label")
+    VIEW_MOBILE = Event(ui_type=schemas.EventType.VIEW_MOBILE, table="events_ios.views", column="name")
+    SWIPE_MOBILE = Event(ui_type=schemas.EventType.SWIPE_MOBILE, table="events_ios.swipes", column="label")
+    CUSTOM_MOBILE = Event(ui_type=schemas.EventType.CUSTOM_MOBILE, table="events_common.customs", column="name")
+    REQUEST_MOBILE = Event(ui_type=schemas.EventType.REQUEST_MOBILE, table="events_common.requests", column="path")
+    CRASH_MOBILE = Event(ui_type=schemas.EventType.ERROR_MOBILE, table="events_common.crashes",
+                         column=None)  # column=None because errors are searched by name or message
 
 
 SUPPORTED_TYPES = {
diff --git a/api/chalicelib/core/feature_flags.py b/api/chalicelib/core/feature_flags.py
index 06be8737e3..26dc90e08b 100644
--- a/api/chalicelib/core/feature_flags.py
+++ b/api/chalicelib/core/feature_flags.py
@@ -106,12 +106,12 @@ def prepare_constraints_params_to_search(data, project_id, user_id):
     if data.query is not None and len(data.query) > 0:
         constraints.append("flag_key ILIKE %(query)s")
         params["query"] = helper.values_for_operator(value=data.query,
-                                                     op=schemas.SearchEventOperator._contains)
+                                                     op=schemas.SearchEventOperator.CONTAINS)
     return constraints, params
 
 
 def create_feature_flag(project_id: int, user_id: int, feature_flag_data: schemas.FeatureFlagSchema) -> Optional[int]:
-    if feature_flag_data.flag_type == schemas.FeatureFlagType.multi_variant and len(feature_flag_data.variants) == 0:
+    if feature_flag_data.flag_type == schemas.FeatureFlagType.MULTI_VARIANT and len(feature_flag_data.variants) == 0:
         raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
                             detail="Variants are required for multi variant flag")
 
@@ -193,7 +193,7 @@ def validate_unique_flag_key(feature_flag_data, project_id, exclude_id=None):
 
 def validate_multi_variant_flag(feature_flag_data):
-    if feature_flag_data.flag_type == schemas.FeatureFlagType.multi_variant:
+    if feature_flag_data.flag_type == schemas.FeatureFlagType.MULTI_VARIANT:
         if sum([v.rollout_percentage for v in feature_flag_data.variants]) > 100:
             raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
                                 detail=f"Sum of rollout percentage for variants cannot be greater than 100.")
diff --git a/api/chalicelib/core/funnels.py b/api/chalicelib/core/funnels.py
index e15e89c315..39400ed081 100644
--- a/api/chalicelib/core/funnels.py
+++ b/api/chalicelib/core/funnels.py
@@ -7,10 +7,10 @@
 
 
 def filter_stages(stages: List[schemas.SessionSearchEventSchema2]):
-    ALLOW_TYPES = [schemas.EventType.click, schemas.EventType.input,
-                   schemas.EventType.location, schemas.EventType.custom,
-                   schemas.EventType.click_mobile, schemas.EventType.input_mobile,
-                   schemas.EventType.view_mobile, schemas.EventType.custom_mobile, ]
+    ALLOW_TYPES = [schemas.EventType.CLICK, schemas.EventType.INPUT,
+                   schemas.EventType.LOCATION, schemas.EventType.CUSTOM,
+                   schemas.EventType.CLICK_MOBILE, schemas.EventType.INPUT_MOBILE,
+                   schemas.EventType.VIEW_MOBILE, schemas.EventType.CUSTOM_MOBILE, ]
     return [s for s in stages if s.type in ALLOW_TYPES and s.value is not None]
 
 
@@ -24,7 +24,7 @@ def __fix_stages(f_events: List[schemas.SessionSearchEventSchema2]):
     events = []
     for e in f_events:
         if e.operator is None:
-            e.operator = schemas.SearchEventOperator._is
+            e.operator = schemas.SearchEventOperator.IS
 
         if not isinstance(e.value, list):
             e.value = [e.value]
@@ -47,10 +47,10 @@ def get_top_insights_on_the_fly_widget(project_id, data: schemas.CardSeriesFilte
                                                                metric_of=metric_of)
     insights = helper.list_to_camel_case(insights)
     if len(insights) > 0:
-        if metric_of == schemas.MetricOfFunnels.session_count and total_drop_due_to_issues > (
+        if metric_of == schemas.MetricOfFunnels.SESSION_COUNT and total_drop_due_to_issues > (
                 insights[0]["sessionsCount"] - insights[-1]["sessionsCount"]):
             total_drop_due_to_issues = insights[0]["sessionsCount"] - insights[-1]["sessionsCount"]
-        elif metric_of == schemas.MetricOfFunnels.user_count and total_drop_due_to_issues > (
+        elif metric_of == schemas.MetricOfFunnels.USER_COUNT and total_drop_due_to_issues > (
                 insights[0]["usersCount"] - insights[-1]["usersCount"]):
             total_drop_due_to_issues = insights[0]["usersCount"] - insights[-1]["usersCount"]
         insights[-1]["dropDueToIssues"] = total_drop_due_to_issues
diff --git a/api/chalicelib/core/heatmaps.py b/api/chalicelib/core/heatmaps.py
index 5885e57132..5a4ac5711c 100644
--- a/api/chalicelib/core/heatmaps.py
+++ b/api/chalicelib/core/heatmaps.py
@@ -149,27 +149,27 @@ def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_i
     no_platform = True
     no_location = True
     for f in data.filters:
-        if f.type == schemas.FilterType.platform:
+        if f.type == schemas.FilterType.PLATFORM:
             no_platform = False
             break
     for f in data.events:
-        if f.type == schemas.EventType.location:
+        if f.type == schemas.EventType.LOCATION:
             no_location = False
             if len(f.value) == 0:
-                f.operator = schemas.SearchEventOperator._is_any
+                f.operator = schemas.SearchEventOperator.IS_ANY
             break
     if no_platform:
-        data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.platform,
-                                                              value=[schemas.PlatformType.desktop],
-                                                              operator=schemas.SearchEventOperator._is))
+        data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.PLATFORM,
+                                                              value=[schemas.PlatformType.DESKTOP],
+                                                              operator=schemas.SearchEventOperator.IS))
     if no_location:
-        data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.location,
+        data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION,
                                                              value=[],
-                                                             operator=schemas.SearchEventOperator._is_any))
+                                                             operator=schemas.SearchEventOperator.IS_ANY))
 
-    data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.events_count,
+    data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT,
                                                           value=[0],
-                                                          operator=schemas.MathOperator._greater))
+                                                          operator=schemas.MathOperator.GREATER))
 
     full_args, query_part = sessions.search_query_parts(data=data, error_status=None, errors_only=False,
                                                         favorite_only=data.bookmarked, issue=None,
@@ -178,7 +178,7 @@ def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_i
     if len(exclude_sessions) > 0:
         query_part += "\n AND session_id NOT IN %(exclude_sessions)s"
     with pg_client.PostgresClient() as cur:
-        data.order = schemas.SortOrderType.desc
+        data.order = schemas.SortOrderType.DESC
         data.sort = 'duration'
         main_query = cur.mogrify(f"""SELECT *
                                      FROM (SELECT {SESSION_PROJECTION_COLS}
diff --git a/api/chalicelib/core/integration_github.py b/api/chalicelib/core/integration_github.py
index 41cd292f67..f82eb90674 100644
--- a/api/chalicelib/core/integration_github.py
+++ b/api/chalicelib/core/integration_github.py
@@ -3,7 +3,7 @@
 from chalicelib.core.integration_github_issue import GithubIntegrationIssue
 from chalicelib.utils import pg_client, helper
 
-PROVIDER = schemas.IntegrationType.github
+PROVIDER = schemas.IntegrationType.GITHUB
 
 
 class GitHubIntegration(integration_base.BaseIntegration):
diff --git a/api/chalicelib/core/integration_jira_cloud.py b/api/chalicelib/core/integration_jira_cloud.py
index a711b484f5..a6dedf94ba 100644
--- a/api/chalicelib/core/integration_jira_cloud.py
+++ b/api/chalicelib/core/integration_jira_cloud.py
@@ -3,7 +3,7 @@
 from chalicelib.core.integration_jira_cloud_issue import JIRACloudIntegrationIssue
 from chalicelib.utils import pg_client, helper
 
-PROVIDER = schemas.IntegrationType.jira
+PROVIDER = schemas.IntegrationType.JIRA
 
 
 def obfuscate_string(string):
diff --git a/api/chalicelib/core/integrations_global.py b/api/chalicelib/core/integrations_global.py
index 66cd38a74b..c7c79d7a53 100644
--- a/api/chalicelib/core/integrations_global.py
+++ b/api/chalicelib/core/integrations_global.py
@@ -9,52 +9,52 @@ def get_global_integrations_status(tenant_id, user_id, project_id):
                 SELECT EXISTS((SELECT 1
                                FROM public.oauth_authentication
                                WHERE user_id = %(user_id)s
-                                 AND provider = 'github')) AS {schemas.IntegrationType.github.value},
+                                 AND provider = 'github')) AS {schemas.IntegrationType.GITHUB.value},
                        EXISTS((SELECT 1
                                FROM public.jira_cloud
-                               WHERE user_id = %(user_id)s)) AS {schemas.IntegrationType.jira.value},
+                               WHERE user_id = %(user_id)s)) AS {schemas.IntegrationType.JIRA.value},
                        EXISTS((SELECT 1
                                FROM public.integrations
                                WHERE project_id=%(project_id)s
-                                 AND provider='bugsnag')) AS {schemas.IntegrationType.bugsnag.value},
+                                 AND provider='bugsnag')) AS {schemas.IntegrationType.BUGSNAG.value},
                        EXISTS((SELECT 1
                                FROM public.integrations
                                WHERE project_id=%(project_id)s
-                                 AND provider='cloudwatch')) AS {schemas.IntegrationType.cloudwatch.value},
+                                 AND provider='cloudwatch')) AS {schemas.IntegrationType.CLOUDWATCH.value},
                        EXISTS((SELECT 1
                                FROM public.integrations
                                WHERE project_id=%(project_id)s
-                                 AND provider='datadog')) AS {schemas.IntegrationType.datadog.value},
+                                 AND provider='datadog')) AS {schemas.IntegrationType.DATADOG.value},
                        EXISTS((SELECT 1
                                FROM public.integrations
                                WHERE project_id=%(project_id)s
-                                 AND provider='newrelic')) AS {schemas.IntegrationType.newrelic.value},
+                                 AND provider='newrelic')) AS {schemas.IntegrationType.NEWRELIC.value},
                        EXISTS((SELECT 1
                                FROM public.integrations
                                WHERE project_id=%(project_id)s
-                                 AND provider='rollbar')) AS {schemas.IntegrationType.rollbar.value},
+                                 AND provider='rollbar')) AS {schemas.IntegrationType.ROLLBAR.value},
                        EXISTS((SELECT 1
                                FROM public.integrations
                                WHERE project_id=%(project_id)s
-                                 AND provider='sentry')) AS {schemas.IntegrationType.sentry.value},
+                                 AND provider='sentry')) AS {schemas.IntegrationType.SENTRY.value},
                        EXISTS((SELECT 1
                                FROM public.integrations
                                WHERE project_id=%(project_id)s
-                                 AND provider='stackdriver')) AS {schemas.IntegrationType.stackdriver.value},
+                                 AND provider='stackdriver')) AS {schemas.IntegrationType.STACKDRIVER.value},
                        EXISTS((SELECT 1
                                FROM public.integrations
                                WHERE project_id=%(project_id)s
-                                 AND provider='sumologic')) AS {schemas.IntegrationType.sumologic.value},
+                                 AND provider='sumologic')) AS {schemas.IntegrationType.SUMOLOGIC.value},
                        EXISTS((SELECT 1
                                FROM public.integrations
                                WHERE project_id=%(project_id)s
-                                 AND provider='elasticsearch')) AS {schemas.IntegrationType.elasticsearch.value},
+                                 AND provider='elasticsearch')) AS {schemas.IntegrationType.ELASTICSEARCH.value},
                        EXISTS((SELECT 1
                                FROM public.webhooks
-                               WHERE type='slack' AND deleted_at ISNULL)) AS {schemas.IntegrationType.slack.value},
+                               WHERE type='slack' AND deleted_at ISNULL)) AS {schemas.IntegrationType.SLACK.value},
                        EXISTS((SELECT 1
                                FROM public.webhooks
-                               WHERE type='msteams' AND deleted_at ISNULL)) AS {schemas.IntegrationType.ms_teams.value};""",
+                               WHERE type='msteams' AND deleted_at ISNULL)) AS {schemas.IntegrationType.MS_TEAMS.value};""",
                 {"user_id": user_id, "tenant_id": tenant_id, "project_id": project_id})
         )
         current_integrations = cur.fetchone()
diff --git a/api/chalicelib/core/metrics.py b/api/chalicelib/core/metrics.py
index 4365568975..a012bbe146 100644
--- a/api/chalicelib/core/metrics.py
+++ b/api/chalicelib/core/metrics.py
@@ -94,25 +94,25 @@ def __get_meta_constraint(project_id, data):
         else:
             filter_type = f["key"].upper()
             filter_type = [filter_type, "USER" + filter_type, filter_type[4:]]
-            if any(item in [schemas.FilterType.user_browser] \
+            if any(item in [schemas.FilterType.USER_BROWSER] \
                    for item in filter_type):
                 constraints.append(f"sessions.user_browser = %({f['key']}_{i})s")
-            elif any(item in [schemas.FilterType.user_os, schemas.FilterType.user_os_mobile] \
+            elif any(item in [schemas.FilterType.USER_OS, schemas.FilterType.USER_OS_MOBILE] \
                      for item in filter_type):
                 constraints.append(f"sessions.user_os = %({f['key']}_{i})s")
-            elif any(item in [schemas.FilterType.user_device, schemas.FilterType.user_device_mobile] \
+            elif any(item in [schemas.FilterType.USER_DEVICE, schemas.FilterType.USER_DEVICE_MOBILE] \
                      for item in filter_type):
                 constraints.append(f"sessions.user_device = %({f['key']}_{i})s")
-            elif any(item in [schemas.FilterType.user_country, schemas.FilterType.user_country_mobile] \
+            elif any(item in [schemas.FilterType.USER_COUNTRY, schemas.FilterType.USER_COUNTRY_MOBILE] \
                      for item in filter_type):
                 constraints.append(f"sessions.user_country = %({f['key']}_{i})s")
-            elif any(item in [schemas.FilterType.user_id, schemas.FilterType.user_id_mobile] \
+            elif any(item in [schemas.FilterType.USER_ID, schemas.FilterType.USER_ID_MOBILE] \
                      for item in filter_type):
                 constraints.append(f"sessions.user_id = %({f['key']}_{i})s")
-            elif any(item in [schemas.FilterType.user_anonymous_id, schemas.FilterType.user_anonymous_id_mobile] \
+            elif any(item in [schemas.FilterType.USER_ANONYMOUS_ID, schemas.FilterType.USER_ANONYMOUS_ID_MOBILE] \
                      for item in filter_type):
                 constraints.append(f"sessions.user_anonymous_id = %({f['key']}_{i})s")
-            elif any(item in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_mobile] \
+            elif any(item in [schemas.FilterType.REV_ID, schemas.FilterType.REV_ID_MOBILE] \
                      for item in filter_type):
                 constraints.append(f"sessions.rev_id = %({f['key']}_{i})s")
     return constraints
@@ -167,7 +167,7 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
             count = cur.fetchone()["count"]
 
             results["progress"] = helper.__progress(old_val=count, new_val=results["value"])
-    results["unit"] = schemas.TemplatePredefinedUnits.count
+    results["unit"] = schemas.TemplatePredefinedUnits.COUNT
     return results
 
@@ -1087,7 +1087,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=-
             avg = cur.fetchone()["avg"]
         else:
             avg = 0
-    return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.millisecond}
+    return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.MILLISECOND}
 
 
 def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@@ -1180,7 +1180,7 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now(
             } for i, v in enumerate(quantiles_keys)
         ],
         "extremeValues": [{"count": 0}],
-        "unit": schemas.TemplatePredefinedUnits.millisecond
+        "unit": schemas.TemplatePredefinedUnits.MILLISECOND
     }
     rows = helper.list_to_camel_case(rows)
     _99 = result["percentiles"][-1]["responseTime"]
@@ -1422,7 +1422,7 @@ def get_memory_consumption(project_id, startTimestamp=TimeUTC.now(delta_days=-1)
                             WHERE {" AND ".join(pg_sub_query)};"""
        cur.execute(cur.mogrify(pg_query, params))
        avg = cur.fetchone()["avg"]
-    return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.memory}
+    return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.MEMORY}
 
 
 def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@@ -1455,7 +1455,7 @@ def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
         cur.execute(cur.mogrify(pg_query, params))
         avg = cur.fetchone()["avg"]
     return {"value": avg, "chart": helper.list_to_camel_case(rows),
-            "unit": schemas.TemplatePredefinedUnits.percentage}
+            "unit": schemas.TemplatePredefinedUnits.PERCENTAGE}
 
 
 def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@@ -1488,7 +1488,7 @@ def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                             WHERE {" AND ".join(pg_sub_query)};"""
         cur.execute(cur.mogrify(pg_query, params))
         avg = cur.fetchone()["avg"]
-    return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.frame}
+    return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.FRAME}
 
 
 def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@@ -1559,7 +1559,7 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
                 versions.append({v["version"]: v["count"] / (r["total"] / 100)})
             r["versions"] = versions
 
-    return {"chart": rows, "browsers": browsers, "unit": schemas.TemplatePredefinedUnits.count}
+    return {"chart": rows, "browsers": browsers, "unit": schemas.TemplatePredefinedUnits.COUNT}
 
 
 def __get_neutral(rows, add_All_if_empty=True):
@@ -1713,7 +1713,7 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
            avg = cur.fetchone()["avg"]
         else:
            avg = 0
-    return {"value": avg, "chart": rows, "unit": schemas.TemplatePredefinedUnits.millisecond}
+    return {"value": avg, "chart": rows, "unit": schemas.TemplatePredefinedUnits.MILLISECOND}
 
 
 def get_errors_per_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1),
@@ -2551,7 +2551,7 @@ def get_user_activity_avg_visited_pages(project_id, startTimestamp=TimeUTC.now(d
             previous = helper.dict_to_camel_case(row)
             results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"])
 
-    results["unit"] = schemas.TemplatePredefinedUnits.count
schemas.TemplatePredefinedUnits.count + results["unit"] = schemas.TemplatePredefinedUnits.COUNT return results @@ -2911,7 +2911,7 @@ def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_ cur.execute(cur.mogrify(pg_query, {**params, **__get_constraint_values(args)})) rows = cur.fetchall() row["chart"] = rows - row["unit"] = schemas.TemplatePredefinedUnits.count + row["unit"] = schemas.TemplatePredefinedUnits.COUNT return helper.dict_to_camel_case(row) @@ -2960,5 +2960,5 @@ def get_unique_users(project_id, startTimestamp=TimeUTC.now(delta_days=-1), count = cur.fetchone()["count"] results["progress"] = helper.__progress(old_val=count, new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.count + results["unit"] = schemas.TemplatePredefinedUnits.COUNT return results diff --git a/api/chalicelib/core/performance_event.py b/api/chalicelib/core/performance_event.py index 76633ce40a..70fd63bfda 100644 --- a/api/chalicelib/core/performance_event.py +++ b/api/chalicelib/core/performance_event.py @@ -3,13 +3,13 @@ def get_col(perf: schemas.PerformanceEventType): return { - schemas.PerformanceEventType.location_dom_complete: {"column": "dom_building_time", "extraJoin": None}, - schemas.PerformanceEventType.location_ttfb: {"column": "ttfb", "extraJoin": None}, - schemas.PerformanceEventType.location_avg_cpu_load: {"column": "avg_cpu", "extraJoin": "events.performance"}, - schemas.PerformanceEventType.location_avg_memory_usage: {"column": "avg_used_js_heap_size", + schemas.PerformanceEventType.LOCATION_DOM_COMPLETE: {"column": "dom_building_time", "extraJoin": None}, + schemas.PerformanceEventType.LOCATION_TTFB: {"column": "ttfb", "extraJoin": None}, + schemas.PerformanceEventType.LOCATION_AVG_CPU_LOAD: {"column": "avg_cpu", "extraJoin": "events.performance"}, + schemas.PerformanceEventType.LOCATION_AVG_MEMORY_USAGE: {"column": "avg_used_js_heap_size", "extraJoin": "events.performance"}, - schemas.PerformanceEventType.fetch_failed: {"column": "success", "extraJoin": None}, + schemas.PerformanceEventType.FETCH_FAILED: {"column": "success", "extraJoin": None}, # schemas.PerformanceEventType.fetch_duration: {"column": "duration", "extraJoin": None}, - schemas.PerformanceEventType.location_largest_contentful_paint_time: {"column": "first_contentful_paint_time", + schemas.PerformanceEventType.LOCATION_LARGEST_CONTENTFUL_PAINT_TIME: {"column": "first_contentful_paint_time", "extraJoin": None} }.get(perf) diff --git a/api/chalicelib/core/product_analytics.py b/api/chalicelib/core/product_analytics.py index fa61073d3c..acd165a60d 100644 --- a/api/chalicelib/core/product_analytics.py +++ b/api/chalicelib/core/product_analytics.py @@ -70,10 +70,10 @@ def __transform_journey(rows, reverse_path=False): JOURNEY_TYPES = { - schemas.ProductAnalyticsSelectedEventType.location: {"table": "events.pages", "column": "path"}, - schemas.ProductAnalyticsSelectedEventType.click: {"table": "events.clicks", "column": "label"}, - schemas.ProductAnalyticsSelectedEventType.input: {"table": "events.inputs", "column": "label"}, - schemas.ProductAnalyticsSelectedEventType.custom_event: {"table": "events_common.customs", "column": "name"} + schemas.ProductAnalyticsSelectedEventType.LOCATION: {"table": "events.pages", "column": "path"}, + schemas.ProductAnalyticsSelectedEventType.CLICK: {"table": "events.clicks", "column": "label"}, + schemas.ProductAnalyticsSelectedEventType.INPUT: {"table": "events.inputs", "column": "label"}, + schemas.ProductAnalyticsSelectedEventType.CUSTOM_EVENT: 
{"table": "events_common.customs", "column": "name"} } @@ -92,10 +92,10 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis): sessions_conditions = ["start_ts>=%(startTimestamp)s", "start_ts<%(endTimestamp)s", "project_id=%(project_id)s", "events_count > 1", "duration>0"] if len(data.metric_value) == 0: - data.metric_value.append(schemas.ProductAnalyticsSelectedEventType.location) - sub_events.append({"table": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.location]["table"], - "column": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.location]["column"], - "eventType": schemas.ProductAnalyticsSelectedEventType.location.value}) + data.metric_value.append(schemas.ProductAnalyticsSelectedEventType.LOCATION) + sub_events.append({"table": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.LOCATION]["table"], + "column": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.LOCATION]["column"], + "eventType": schemas.ProductAnalyticsSelectedEventType.LOCATION.value}) else: for v in data.metric_value: if JOURNEY_TYPES.get(v): @@ -149,49 +149,49 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis): continue # ---- meta-filters - if f.type == schemas.FilterType.user_browser: + if f.type == schemas.FilterType.USER_BROWSER: if is_any: sessions_conditions.append('user_browser IS NOT NULL') else: sessions_conditions.append( sh.multi_conditions(f'user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif f.type in [schemas.FilterType.user_os]: + elif f.type in [schemas.FilterType.USER_OS]: if is_any: sessions_conditions.append('user_os IS NOT NULL') else: sessions_conditions.append( sh.multi_conditions(f'user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif f.type in [schemas.FilterType.user_device]: + elif f.type in [schemas.FilterType.USER_DEVICE]: if is_any: sessions_conditions.append('user_device IS NOT NULL') else: sessions_conditions.append( sh.multi_conditions(f'user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif f.type in [schemas.FilterType.user_country]: + elif f.type in [schemas.FilterType.USER_COUNTRY]: if is_any: sessions_conditions.append('user_country IS NOT NULL') else: sessions_conditions.append( sh.multi_conditions(f'user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif f.type == schemas.FilterType.user_city: + elif f.type == schemas.FilterType.USER_CITY: if is_any: sessions_conditions.append('user_city IS NOT NULL') else: sessions_conditions.append( sh.multi_conditions(f'user_city {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif f.type == schemas.FilterType.user_state: + elif f.type == schemas.FilterType.USER_STATE: if is_any: sessions_conditions.append('user_state IS NOT NULL') else: sessions_conditions.append( sh.multi_conditions(f'user_state {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif f.type in [schemas.FilterType.utm_source]: + elif f.type in [schemas.FilterType.UTM_SOURCE]: if is_any: sessions_conditions.append('utm_source IS NOT NULL') elif is_undefined: @@ -201,7 +201,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis): sh.multi_conditions(f'utm_source {op} %({f_k})s::text', f.value, is_not=is_not, value_key=f_k)) - elif f.type in [schemas.FilterType.utm_medium]: + elif f.type in [schemas.FilterType.UTM_MEDIUM]: if is_any: sessions_conditions.append('utm_medium IS NOT NULL') elif is_undefined: @@ -211,7 +211,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis): 
sh.multi_conditions(f'utm_medium {op} %({f_k})s::text', f.value, is_not=is_not, value_key=f_k)) - elif f.type in [schemas.FilterType.utm_campaign]: + elif f.type in [schemas.FilterType.UTM_CAMPAIGN]: if is_any: sessions_conditions.append('utm_campaign IS NOT NULL') elif is_undefined: @@ -221,14 +221,14 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis): sh.multi_conditions(f'utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not, value_key=f_k)) - elif f.type == schemas.FilterType.duration: + elif f.type == schemas.FilterType.DURATION: if len(f.value) > 0 and f.value[0] is not None: sessions_conditions.append("duration >= %(minDuration)s") extra_values["minDuration"] = f.value[0] if len(f.value) > 1 and f.value[1] is not None and int(f.value[1]) > 0: sessions_conditions.append("duration <= %(maxDuration)s") extra_values["maxDuration"] = f.value[1] - elif f.type == schemas.FilterType.referrer: + elif f.type == schemas.FilterType.REFERRER: # extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)" if is_any: sessions_conditions.append('base_referrer IS NOT NULL') @@ -236,7 +236,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis): sessions_conditions.append( sh.multi_conditions(f"base_referrer {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k)) - elif f.type == schemas.FilterType.metadata: + elif f.type == schemas.FilterType.METADATA: # get metadata list only if you need it if meta_keys is None: meta_keys = metadata.get(project_id=project_id) @@ -252,7 +252,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis): f"{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k)) - elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_mobile]: + elif f.type in [schemas.FilterType.USER_ID, schemas.FilterType.USER_ID_MOBILE]: if is_any: sessions_conditions.append('user_id IS NOT NULL') elif is_undefined: @@ -262,8 +262,8 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis): sh.multi_conditions(f"user_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k)) - elif f.type in [schemas.FilterType.user_anonymous_id, - schemas.FilterType.user_anonymous_id_mobile]: + elif f.type in [schemas.FilterType.USER_ANONYMOUS_ID, + schemas.FilterType.USER_ANONYMOUS_ID_MOBILE]: if is_any: sessions_conditions.append('user_anonymous_id IS NOT NULL') elif is_undefined: @@ -273,7 +273,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis): sh.multi_conditions(f"user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k)) - elif f.type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_mobile]: + elif f.type in [schemas.FilterType.REV_ID, schemas.FilterType.REV_ID_MOBILE]: if is_any: sessions_conditions.append('rev_id IS NOT NULL') elif is_undefined: @@ -282,13 +282,13 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis): sessions_conditions.append( sh.multi_conditions(f"rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k)) - elif f.type == schemas.FilterType.platform: + elif f.type == schemas.FilterType.PLATFORM: # op = __ sh.get_sql_operator(f.operator) sessions_conditions.append( sh.multi_conditions(f"user_device_type {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k)) - elif f.type == schemas.FilterType.issue: + elif f.type == schemas.FilterType.ISSUE: if is_any: sessions_conditions.append("array_length(issue_types, 1) > 0") else: @@ -296,7 +296,7 @@ def 
path_analysis(project_id: int, data: schemas.CardPathAnalysis): sh.multi_conditions(f"%({f_k})s {op} ANY (issue_types)", f.value, is_not=is_not, value_key=f_k)) - elif f.type == schemas.FilterType.events_count: + elif f.type == schemas.FilterType.EVENTS_COUNT: sessions_conditions.append( sh.multi_conditions(f"events_count {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k)) diff --git a/api/chalicelib/core/sessions.py b/api/chalicelib/core/sessions.py index d230e88108..0b68a993cf 100644 --- a/api/chalicelib/core/sessions.py +++ b/api/chalicelib/core/sessions.py @@ -40,7 +40,7 @@ # This function executes the query and return result def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False, - error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False, + error_status=schemas.ErrorStatus.ALL, count_only=False, issue=None, ids_only=False, platform="web"): if data.bookmarked: data.startTimestamp, data.endTimestamp = sessions_favorite.get_start_end_timestamp(project_id, user_id) @@ -74,12 +74,12 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ elif data.group_by_user: g_sort = "count(full_sessions)" if data.order is None: - data.order = schemas.SortOrderType.desc.value + data.order = schemas.SortOrderType.DESC.value else: data.order = data.order if data.sort is not None and data.sort != 'sessionsCount': sort = helper.key_to_snake_case(data.sort) - g_sort = f"{'MIN' if data.order == schemas.SortOrderType.desc else 'MAX'}({sort})" + g_sort = f"{'MIN' if data.order == schemas.SortOrderType.DESC else 'MAX'}({sort})" else: sort = 'start_ts' @@ -109,7 +109,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ full_args) else: if data.order is None: - data.order = schemas.SortOrderType.desc.value + data.order = schemas.SortOrderType.DESC.value else: data.order = data.order sort = 'session_id' @@ -176,20 +176,20 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp, density=density, factor=1, decimal=True)) extra_event = None - if metric_of == schemas.MetricOfTable.visited_url: + if metric_of == schemas.MetricOfTable.VISITED_URL: extra_event = "events.pages" - elif metric_of == schemas.MetricOfTable.issues and len(metric_value) > 0: - data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue, - operator=schemas.SearchEventOperator._is)) + elif metric_of == schemas.MetricOfTable.ISSUES and len(metric_value) > 0: + data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.ISSUE, + operator=schemas.SearchEventOperator.IS)) full_args, query_part = search_query_parts(data=data, error_status=None, errors_only=False, favorite_only=False, issue=None, project_id=project_id, user_id=None, extra_event=extra_event) full_args["step_size"] = step_size sessions = [] with pg_client.PostgresClient() as cur: - if metric_type == schemas.MetricType.timeseries: - if view_type == schemas.MetricTimeseriesViewType.line_chart: - if metric_of == schemas.MetricOfTimeseries.session_count: + if metric_type == schemas.MetricType.TIMESERIES: + if view_type == schemas.MetricTimeseriesViewType.LINE_CHART: + if metric_of == schemas.MetricOfTimeseries.SESSION_COUNT: # main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT DISTINCT ON(s.session_id) s.session_id, s.start_ts 
main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT s.session_id, s.start_ts {query_part}) @@ -202,7 +202,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d AND start_ts <= generated_timestamp + %(step_size)s) AS sessions ON (TRUE) GROUP BY generated_timestamp ORDER BY generated_timestamp;""", full_args) - elif metric_of == schemas.MetricOfTimeseries.user_count: + elif metric_of == schemas.MetricOfTimeseries.USER_COUNT: main_query = cur.mogrify(f"""WITH full_sessions AS (SELECT s.user_id, s.start_ts {query_part} AND s.user_id IS NOT NULL @@ -234,24 +234,24 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d logging.warning(data.model_dump_json()) logging.warning("--------------------") raise err - if view_type == schemas.MetricTimeseriesViewType.line_chart: + if view_type == schemas.MetricTimeseriesViewType.LINE_CHART: sessions = cur.fetchall() else: sessions = cur.fetchone()["count"] - elif metric_type == schemas.MetricType.table: + elif metric_type == schemas.MetricType.TABLE: if isinstance(metric_of, schemas.MetricOfTable): main_col = "user_id" extra_col = "" extra_where = "" pre_query = "" distinct_on = "s.session_id" - if metric_of == schemas.MetricOfTable.user_country: + if metric_of == schemas.MetricOfTable.USER_COUNTRY: main_col = "user_country" - elif metric_of == schemas.MetricOfTable.user_device: + elif metric_of == schemas.MetricOfTable.USER_DEVICE: main_col = "user_device" - elif metric_of == schemas.MetricOfTable.user_browser: + elif metric_of == schemas.MetricOfTable.USER_BROWSER: main_col = "user_browser" - elif metric_of == schemas.MetricOfTable.issues: + elif metric_of == schemas.MetricOfTable.ISSUES: main_col = "issue" extra_col = f", UNNEST(s.issue_types) AS {main_col}" if len(metric_value) > 0: @@ -261,7 +261,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d extra_where.append(f"{main_col} = %({arg_name})s") full_args[arg_name] = metric_value[i] extra_where = f"WHERE ({' OR '.join(extra_where)})" - elif metric_of == schemas.MetricOfTable.visited_url: + elif metric_of == schemas.MetricOfTable.VISITED_URL: main_col = "path" extra_col = ", path" distinct_on += ",path" @@ -302,11 +302,11 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de density=density, factor=1, decimal=True)) extra_event = None extra_conditions = None - if metric_of == schemas.MetricOfTable.visited_url: + if metric_of == schemas.MetricOfTable.VISITED_URL: extra_event = "events.pages" extra_conditions = {} for e in data.events: - if e.type == schemas.EventType.location: + if e.type == schemas.EventType.LOCATION: if e.operator not in extra_conditions: extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({ "type": e.type, @@ -320,9 +320,9 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de extra_conditions[e.operator].value.append(v) extra_conditions = list(extra_conditions.values()) - elif metric_of == schemas.MetricOfTable.issues and len(metric_value) > 0: - data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue, - operator=schemas.SearchEventOperator._is)) + elif metric_of == schemas.MetricOfTable.ISSUES and len(metric_value) > 0: + data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.ISSUE, + operator=schemas.SearchEventOperator.IS)) full_args, query_part = search_query_parts(data=data, error_status=None, 
errors_only=False, favorite_only=False, issue=None, project_id=project_id, user_id=None, extra_event=extra_event, extra_conditions=extra_conditions) @@ -337,13 +337,13 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de extra_col = "" extra_where = "" distinct_on = "s.session_id" - if metric_of == schemas.MetricOfTable.user_country: + if metric_of == schemas.MetricOfTable.USER_COUNTRY: main_col = "user_country" - elif metric_of == schemas.MetricOfTable.user_device: + elif metric_of == schemas.MetricOfTable.USER_DEVICE: main_col = "user_device" - elif metric_of == schemas.MetricOfTable.user_browser: + elif metric_of == schemas.MetricOfTable.USER_BROWSER: main_col = "user_browser" - elif metric_of == schemas.MetricOfTable.issues: + elif metric_of == schemas.MetricOfTable.ISSUES: main_col = "issue" extra_col = f", UNNEST(s.issue_types) AS {main_col}" if len(metric_value) > 0: @@ -353,11 +353,11 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de extra_where.append(f"{main_col} = %({arg_name})s") full_args[arg_name] = metric_value[i] extra_where = f"WHERE ({' OR '.join(extra_where)})" - elif metric_of == schemas.MetricOfTable.visited_url: + elif metric_of == schemas.MetricOfTable.VISITED_URL: main_col = "path" extra_col = ", path" distinct_on += ",path" - if metric_format == schemas.MetricExtendedFormatType.session_count: + if metric_format == schemas.MetricExtendedFormatType.SESSION_COUNT: main_query = f"""SELECT COUNT(*) AS count, COALESCE(SUM(users_sessions.session_count),0) AS count, COALESCE(JSONB_AGG(users_sessions) @@ -448,15 +448,15 @@ def search_table_of_individual_issues(data: schemas.SessionsSearchPayloadSchema, def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2): - return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.request_details, - schemas.EventType.graphql] \ - or event.type in [schemas.PerformanceEventType.location_dom_complete, - schemas.PerformanceEventType.location_largest_contentful_paint_time, - schemas.PerformanceEventType.location_ttfb, - schemas.PerformanceEventType.location_avg_cpu_load, - schemas.PerformanceEventType.location_avg_memory_usage + return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS, + schemas.EventType.GRAPHQL] \ + or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE, + schemas.PerformanceEventType.LOCATION_LARGEST_CONTENTFUL_PAINT_TIME, + schemas.PerformanceEventType.LOCATION_TTFB, + schemas.PerformanceEventType.LOCATION_AVG_CPU_LOAD, + schemas.PerformanceEventType.LOCATION_AVG_MEMORY_USAGE ] and (event.source is None or len(event.source) == 0) \ - or event.type in [schemas.EventType.request_details, schemas.EventType.graphql] and ( + or event.type in [schemas.EventType.REQUEST_DETAILS, schemas.EventType.GRAPHQL] and ( event.filters is None or len(event.filters) == 0)) @@ -483,7 +483,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, f_k = f"f_value{i}" full_args = {**full_args, **sh.multi_values(f.value, value_key=f_k)} op = sh.get_sql_operator(f.operator) \ - if filter_type not in [schemas.FilterType.events_count] else f.operator.value + if filter_type not in [schemas.FilterType.EVENTS_COUNT] else f.operator.value is_any = sh.isAny_opreator(f.operator) is_undefined = sh.isUndefined_operator(f.operator) if not is_any and not is_undefined and len(f.value) == 0: @@ -491,7 +491,7 @@ def search_query_parts(data: 
schemas.SessionsSearchPayloadSchema, error_status, is_not = False if sh.is_negation_operator(f.operator): is_not = True - if filter_type == schemas.FilterType.user_browser: + if filter_type == schemas.FilterType.USER_BROWSER: if is_any: extra_constraints.append('s.user_browser IS NOT NULL') ss_constraints.append('ms.user_browser IS NOT NULL') @@ -502,7 +502,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, sh.multi_conditions(f'ms.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_mobile]: + elif filter_type in [schemas.FilterType.USER_OS, schemas.FilterType.USER_OS_MOBILE]: if is_any: extra_constraints.append('s.user_os IS NOT NULL') ss_constraints.append('ms.user_os IS NOT NULL') @@ -512,7 +512,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, ss_constraints.append( sh.multi_conditions(f'ms.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_mobile]: + elif filter_type in [schemas.FilterType.USER_DEVICE, schemas.FilterType.USER_DEVICE_MOBILE]: if is_any: extra_constraints.append('s.user_device IS NOT NULL') ss_constraints.append('ms.user_device IS NOT NULL') @@ -522,7 +522,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, ss_constraints.append( sh.multi_conditions(f'ms.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_mobile]: + elif filter_type in [schemas.FilterType.USER_COUNTRY, schemas.FilterType.USER_COUNTRY_MOBILE]: if is_any: extra_constraints.append('s.user_country IS NOT NULL') ss_constraints.append('ms.user_country IS NOT NULL') @@ -533,7 +533,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, sh.multi_conditions(f'ms.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type == schemas.FilterType.user_city: + elif filter_type == schemas.FilterType.USER_CITY: if is_any: extra_constraints.append('s.user_city IS NOT NULL') ss_constraints.append('ms.user_city IS NOT NULL') @@ -544,7 +544,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, sh.multi_conditions(f'ms.user_city {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type == schemas.FilterType.user_state: + elif filter_type == schemas.FilterType.USER_STATE: if is_any: extra_constraints.append('s.user_state IS NOT NULL') ss_constraints.append('ms.user_state IS NOT NULL') @@ -555,7 +555,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, sh.multi_conditions(f'ms.user_state {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.utm_source]: + elif filter_type in [schemas.FilterType.UTM_SOURCE]: if is_any: extra_constraints.append('s.utm_source IS NOT NULL') ss_constraints.append('ms.utm_source IS NOT NULL') @@ -569,7 +569,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, ss_constraints.append( sh.multi_conditions(f'ms.utm_source {op} %({f_k})s::text', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.utm_medium]: + elif filter_type in [schemas.FilterType.UTM_MEDIUM]: if is_any: extra_constraints.append('s.utm_medium IS NOT NULL') ss_constraints.append('ms.utm_medium IS 
NOT NULL') @@ -583,7 +583,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, ss_constraints.append( sh.multi_conditions(f'ms.utm_medium {op} %({f_k})s::text', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.utm_campaign]: + elif filter_type in [schemas.FilterType.UTM_CAMPAIGN]: if is_any: extra_constraints.append('s.utm_campaign IS NOT NULL') ss_constraints.append('ms.utm_campaign IS NOT NULL') @@ -598,7 +598,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, sh.multi_conditions(f'ms.utm_campaign {op} %({f_k})s::text', f.value, is_not=is_not, value_key=f_k)) - elif filter_type == schemas.FilterType.duration: + elif filter_type == schemas.FilterType.DURATION: if len(f.value) > 0 and f.value[0] is not None: extra_constraints.append("s.duration >= %(minDuration)s") ss_constraints.append("ms.duration >= %(minDuration)s") @@ -607,7 +607,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, extra_constraints.append("s.duration <= %(maxDuration)s") ss_constraints.append("ms.duration <= %(maxDuration)s") full_args["maxDuration"] = f.value[1] - elif filter_type == schemas.FilterType.referrer: + elif filter_type == schemas.FilterType.REFERRER: # extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)" if is_any: extra_constraints.append('s.base_referrer IS NOT NULL') @@ -636,7 +636,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, sh.multi_conditions( f"ms.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_mobile]: + elif filter_type in [schemas.FilterType.USER_ID, schemas.FilterType.USER_ID_MOBILE]: if is_any: extra_constraints.append('s.user_id IS NOT NULL') ss_constraints.append('ms.user_id IS NOT NULL') @@ -650,8 +650,8 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, ss_constraints.append( sh.multi_conditions(f"ms.user_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_anonymous_id, - schemas.FilterType.user_anonymous_id_mobile]: + elif filter_type in [schemas.FilterType.USER_ANONYMOUS_ID, + schemas.FilterType.USER_ANONYMOUS_ID_MOBILE]: if is_any: extra_constraints.append('s.user_anonymous_id IS NOT NULL') ss_constraints.append('ms.user_anonymous_id IS NOT NULL') @@ -665,7 +665,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, ss_constraints.append( sh.multi_conditions(f"ms.user_anonymous_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_mobile]: + elif filter_type in [schemas.FilterType.REV_ID, schemas.FilterType.REV_ID_MOBILE]: if is_any: extra_constraints.append('s.rev_id IS NOT NULL') ss_constraints.append('ms.rev_id IS NOT NULL') @@ -678,7 +678,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, ss_constraints.append( sh.multi_conditions(f"ms.rev_id {op} %({f_k})s::text", f.value, is_not=is_not, value_key=f_k)) - elif filter_type == schemas.FilterType.platform: + elif filter_type == schemas.FilterType.PLATFORM: # op = __ sh.get_sql_operator(f.operator) extra_constraints.append( sh.multi_conditions(f"s.user_device_type {op} %({f_k})s", f.value, is_not=is_not, @@ -686,7 +686,7 @@ def search_query_parts(data: 
schemas.SessionsSearchPayloadSchema, error_status, ss_constraints.append( sh.multi_conditions(f"ms.user_device_type {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k)) - elif filter_type == schemas.FilterType.issue: + elif filter_type == schemas.FilterType.ISSUE: if is_any: extra_constraints.append("array_length(s.issue_types, 1) > 0") ss_constraints.append("array_length(ms.issue_types, 1) > 0") @@ -701,7 +701,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, sh.multi_conditions(f"%({f_k})s {op} ANY (ms.issue_types)", f.value, is_not=is_not, value_key=f_k)) - elif filter_type == schemas.FilterType.events_count: + elif filter_type == schemas.FilterType.EVENTS_COUNT: extra_constraints.append( sh.multi_conditions(f"s.events_count {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k)) @@ -719,7 +719,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, valid_events_count += 1 events_query_from = [] event_index = 0 - or_events = data.events_order == schemas.SearchEventOrder._or + or_events = data.events_order == schemas.SearchEventOrder.OR # events_joiner = " FULL JOIN " if or_events else " INNER JOIN LATERAL " events_joiner = " UNION " if or_events else " INNER JOIN LATERAL " for i, event in enumerate(data.events): @@ -746,7 +746,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, event_from = "%s" event_where = ["main.timestamp >= %(startDate)s", "main.timestamp <= %(endDate)s", "main.session_id=event_0.session_id"] - if data.events_order == schemas.SearchEventOrder._then: + if data.events_order == schemas.SearchEventOrder.THEN: event_where.append(f"event_{event_index - 1}.timestamp <= main.timestamp") e_k = f"e_value{i}" s_k = e_k + "_source" @@ -760,7 +760,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, if platform == "web": event_from = event_from % f"{events.EventType.CLICK.table} AS main " if not is_any: - if event.operator == schemas.ClickEventExtraOperator._on_selector: + if event.operator == schemas.ClickEventExtraOperator.ON_SELECTOR: event_where.append( sh.multi_conditions(f"main.selector = %({e_k})s", event.value, value_key=e_k)) else: @@ -897,7 +897,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, event_where.append( sh.multi_conditions(f"(main1.reason {op} %({e_k})s OR main1.name {op} %({e_k})s)", event.value, value_key=e_k)) - elif event_type == schemas.PerformanceEventType.fetch_failed: + elif event_type == schemas.PerformanceEventType.FETCH_FAILED: event_from = event_from % f"{events.EventType.REQUEST.table} AS main " if not is_any: event_where.append( @@ -920,11 +920,11 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, # event_where.append(f"{tname}.{colname} IS NOT NULL AND {tname}.{colname}>0 AND " + # _multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s", # event.source, value_key=e_k)) - elif event_type in [schemas.PerformanceEventType.location_dom_complete, - schemas.PerformanceEventType.location_largest_contentful_paint_time, - schemas.PerformanceEventType.location_ttfb, - schemas.PerformanceEventType.location_avg_cpu_load, - schemas.PerformanceEventType.location_avg_memory_usage + elif event_type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE, + schemas.PerformanceEventType.LOCATION_LARGEST_CONTENTFUL_PAINT_TIME, + schemas.PerformanceEventType.LOCATION_TTFB, + schemas.PerformanceEventType.LOCATION_AVG_CPU_LOAD, + 
schemas.PerformanceEventType.LOCATION_AVG_MEMORY_USAGE ]: event_from = event_from % f"{events.EventType.LOCATION.table} AS main " col = performance_event.get_col(event_type) @@ -946,7 +946,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, sh.multi_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s", event.source, value_key=e_k)) - elif event_type == schemas.EventType.request_details: + elif event_type == schemas.EventType.REQUEST_DETAILS: event_from = event_from % f"{events.EventType.REQUEST.table} AS main " apply = False for j, f in enumerate(event.filters): @@ -957,31 +957,31 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, op = sh.get_sql_operator(f.operator) e_k_f = e_k + f"_fetch{j}" full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)} - if f.type == schemas.FetchFilterType._url: + if f.type == schemas.FetchFilterType.FETCH_URL: event_where.append( sh.multi_conditions(f"main.{events.EventType.REQUEST.column} {op} %({e_k_f})s::text", f.value, value_key=e_k_f)) apply = True - elif f.type == schemas.FetchFilterType._status_code: + elif f.type == schemas.FetchFilterType.FETCH_STATUS_CODE: event_where.append( sh.multi_conditions(f"main.status_code {f.operator} %({e_k_f})s::integer", f.value, value_key=e_k_f)) apply = True - elif f.type == schemas.FetchFilterType._method: + elif f.type == schemas.FetchFilterType.FETCH_METHOD: event_where.append( sh.multi_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f)) apply = True - elif f.type == schemas.FetchFilterType._duration: + elif f.type == schemas.FetchFilterType.FETCH_DURATION: event_where.append( sh.multi_conditions(f"main.duration {f.operator} %({e_k_f})s::integer", f.value, value_key=e_k_f)) apply = True - elif f.type == schemas.FetchFilterType._request_body: + elif f.type == schemas.FetchFilterType.FETCH_REQUEST_BODY: event_where.append( sh.multi_conditions(f"main.request_body {op} %({e_k_f})s::text", f.value, value_key=e_k_f)) apply = True - elif f.type == schemas.FetchFilterType._response_body: + elif f.type == schemas.FetchFilterType.FETCH_RESPONSE_BODY: event_where.append( sh.multi_conditions(f"main.response_body {op} %({e_k_f})s::text", f.value, value_key=e_k_f)) @@ -990,7 +990,7 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, logging.warning(f"undefined FETCH filter: {f.type}") if not apply: continue - elif event_type == schemas.EventType.graphql: + elif event_type == schemas.EventType.GRAPHQL: event_from = event_from % f"{events.EventType.GRAPHQL.table} AS main " for j, f in enumerate(event.filters): is_any = sh.isAny_opreator(f.operator) @@ -1000,17 +1000,17 @@ def search_query_parts(data: schemas.SessionsSearchPayloadSchema, error_status, op = sh.get_sql_operator(f.operator) e_k_f = e_k + f"_graphql{j}" full_args = {**full_args, **sh.multi_values(f.value, value_key=e_k_f)} - if f.type == schemas.GraphqlFilterType._name: + if f.type == schemas.GraphqlFilterType.GRAPHQL_NAME: event_where.append( sh.multi_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k_f})s", f.value, value_key=e_k_f)) - elif f.type == schemas.GraphqlFilterType._method: + elif f.type == schemas.GraphqlFilterType.GRAPHQL_METHOD: event_where.append( sh.multi_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f)) - elif f.type == schemas.GraphqlFilterType._request_body: + elif f.type == schemas.GraphqlFilterType.GRAPHQL_REQUEST_BODY: event_where.append( 
sh.multi_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f)) - elif f.type == schemas.GraphqlFilterType._response_body: + elif f.type == schemas.GraphqlFilterType.GRAPHQL_RESPONSE_BODY: event_where.append( sh.multi_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f)) else: @@ -1190,8 +1190,8 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None): available_keys = metadata.get_keys_by_projects(project_ids) for i in available_keys: - available_keys[i]["user_id"] = schemas.FilterType.user_id - available_keys[i]["user_anonymous_id"] = schemas.FilterType.user_anonymous_id + available_keys[i]["user_id"] = schemas.FilterType.USER_ID + available_keys[i]["user_anonymous_id"] = schemas.FilterType.USER_ANONYMOUS_ID results = {} for i in project_ids: if m_key not in available_keys[i].values(): diff --git a/api/chalicelib/core/sessions_metas.py b/api/chalicelib/core/sessions_metas.py index 826cc6a7f1..6cfd0bad5a 100644 --- a/api/chalicelib/core/sessions_metas.py +++ b/api/chalicelib/core/sessions_metas.py @@ -3,65 +3,65 @@ from chalicelib.utils.event_filter_definition import SupportedFilter SUPPORTED_TYPES = { - schemas.FilterType.user_os: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os)), - schemas.FilterType.user_browser: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_browser), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_browser)), - schemas.FilterType.user_device: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_device), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_device)), - schemas.FilterType.user_country: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country)), - schemas.FilterType.user_city: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_city), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_city)), - schemas.FilterType.user_state: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_state), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_state)), - schemas.FilterType.user_id: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id)), - schemas.FilterType.user_anonymous_id: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id)), - schemas.FilterType.rev_id: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id)), - schemas.FilterType.referrer: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.referrer), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.referrer)), - schemas.FilterType.utm_campaign: SupportedFilter( - 
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_campaign), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_campaign)), - schemas.FilterType.utm_medium: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_medium), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_medium)), - schemas.FilterType.utm_source: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_source), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_source)), + schemas.FilterType.USER_OS: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS)), + schemas.FilterType.USER_BROWSER: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_BROWSER), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_BROWSER)), + schemas.FilterType.USER_DEVICE: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_DEVICE), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_DEVICE)), + schemas.FilterType.USER_COUNTRY: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_COUNTRY), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_COUNTRY)), + schemas.FilterType.USER_CITY: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_CITY), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_CITY)), + schemas.FilterType.USER_STATE: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_STATE), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_STATE)), + schemas.FilterType.USER_ID: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ID), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ID)), + schemas.FilterType.USER_ANONYMOUS_ID: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ANONYMOUS_ID), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ANONYMOUS_ID)), + schemas.FilterType.REV_ID: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REV_ID), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REV_ID)), + schemas.FilterType.REFERRER: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REFERRER), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REFERRER)), + schemas.FilterType.UTM_CAMPAIGN: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_CAMPAIGN), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_CAMPAIGN)), + schemas.FilterType.UTM_MEDIUM: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_MEDIUM), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_MEDIUM)), + schemas.FilterType.UTM_SOURCE: SupportedFilter( + 
get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_SOURCE), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_SOURCE)), # IOS - schemas.FilterType.user_os_mobile: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os_mobile), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os_mobile)), - schemas.FilterType.user_device_mobile: SupportedFilter( + schemas.FilterType.USER_OS_MOBILE: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS_MOBILE), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS_MOBILE)), + schemas.FilterType.USER_DEVICE_MOBILE: SupportedFilter( get=autocomplete.__generic_autocomplete_metas( - typename=schemas.FilterType.user_device_mobile), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_device_mobile)), - schemas.FilterType.user_country_mobile: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country_mobile), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country_mobile)), - schemas.FilterType.user_id_mobile: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id_mobile), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id_mobile)), - schemas.FilterType.user_anonymous_id_mobile: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id_mobile), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id_mobile)), - schemas.FilterType.rev_id_mobile: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id_mobile), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id_mobile)), + typename=schemas.FilterType.USER_DEVICE_MOBILE), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_DEVICE_MOBILE)), + schemas.FilterType.USER_COUNTRY_MOBILE: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_COUNTRY_MOBILE), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_COUNTRY_MOBILE)), + schemas.FilterType.USER_ID_MOBILE: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ID_MOBILE), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ID_MOBILE)), + schemas.FilterType.USER_ANONYMOUS_ID_MOBILE: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ANONYMOUS_ID_MOBILE), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ANONYMOUS_ID_MOBILE)), + schemas.FilterType.REV_ID_MOBILE: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REV_ID_MOBILE), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REV_ID_MOBILE)), } diff --git a/api/chalicelib/core/significance.py b/api/chalicelib/core/significance.py index 3ccc222403..cf6755c2e1 100644 --- a/api/chalicelib/core/significance.py +++ b/api/chalicelib/core/significance.py @@ -57,29 +57,29 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id) is_not = False if sh.is_negation_operator(f.operator): is_not = True - if 
filter_type == schemas.FilterType.user_browser: + if filter_type == schemas.FilterType.USER_BROWSER: first_stage_extra_constraints.append( sh.multi_conditions(f's.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_mobile]: + elif filter_type in [schemas.FilterType.USER_OS, schemas.FilterType.USER_OS_MOBILE]: first_stage_extra_constraints.append( sh.multi_conditions(f's.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_mobile]: + elif filter_type in [schemas.FilterType.USER_DEVICE, schemas.FilterType.USER_DEVICE_MOBILE]: first_stage_extra_constraints.append( sh.multi_conditions(f's.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_mobile]: + elif filter_type in [schemas.FilterType.USER_COUNTRY, schemas.FilterType.USER_COUNTRY_MOBILE]: first_stage_extra_constraints.append( sh.multi_conditions(f's.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type == schemas.FilterType.duration: + elif filter_type == schemas.FilterType.DURATION: if len(f.value) > 0 and f.value[0] is not None: first_stage_extra_constraints.append(f's.duration >= %(minDuration)s') values["minDuration"] = f.value[0] if len(f["value"]) > 1 and f.value[1] is not None and int(f.value[1]) > 0: first_stage_extra_constraints.append('s.duration <= %(maxDuration)s') values["maxDuration"] = f.value[1] - elif filter_type == schemas.FilterType.referrer: + elif filter_type == schemas.FilterType.REFERRER: # events_query_part = events_query_part + f"INNER JOIN events.pages AS p USING(session_id)" filter_extra_from = [f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)"] first_stage_extra_constraints.append( @@ -94,16 +94,16 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id) f's.{metadata.index_to_colname(meta_keys[f.source])} {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) # values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op) - elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_mobile]: + elif filter_type in [schemas.FilterType.USER_ID, schemas.FilterType.USER_ID_MOBILE]: first_stage_extra_constraints.append( sh.multi_conditions(f's.user_id {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) # values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op) - elif filter_type in [schemas.FilterType.user_anonymous_id, - schemas.FilterType.user_anonymous_id_mobile]: + elif filter_type in [schemas.FilterType.USER_ANONYMOUS_ID, + schemas.FilterType.USER_ANONYMOUS_ID_MOBILE]: first_stage_extra_constraints.append( sh.multi_conditions(f's.user_anonymous_id {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) # values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op) - elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_mobile]: + elif filter_type in [schemas.FilterType.REV_ID, schemas.FilterType.REV_ID_MOBILE]: first_stage_extra_constraints.append( sh.multi_conditions(f's.rev_id {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) # values[f_k] = helper.string_to_sql_like_with_op(f["value"][0], op) @@ -111,7 +111,7 @@ def get_stages_and_events(filter_d: schemas.CardSeriesFilterSchema, project_id) for s in stages: if s.operator is None: - s.operator = 
schemas.SearchEventOperator._is + s.operator = schemas.SearchEventOperator.IS if not isinstance(s.value, list): s.value = [s.value] @@ -431,7 +431,7 @@ def count_users(rows, n_stages, user_key="user_uuid"): return users_count -def get_stages(stages, rows, metric_of=schemas.MetricOfFunnels.session_count): +def get_stages(stages, rows, metric_of=schemas.MetricOfFunnels.SESSION_COUNT): n_stages = len(stages) if metric_of == "sessionCount": base_counts = count_sessions(rows, n_stages) diff --git a/api/chalicelib/core/webhook.py b/api/chalicelib/core/webhook.py index 309a9b1c2c..f3915cbd46 100644 --- a/api/chalicelib/core/webhook.py +++ b/api/chalicelib/core/webhook.py @@ -107,7 +107,7 @@ def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="", return w -def exists_by_name(name: str, exclude_id: Optional[int], webhook_type: str = schemas.WebhookType.webhook, +def exists_by_name(name: str, exclude_id: Optional[int], webhook_type: str = schemas.WebhookType.WEBHOOK, tenant_id: Optional[int] = None) -> bool: with pg_client.PostgresClient() as cur: query = cur.mogrify(f"""SELECT EXISTS(SELECT 1 diff --git a/api/chalicelib/utils/helper.py b/api/chalicelib/utils/helper.py index 08a8ce2865..48a6cec14d 100644 --- a/api/chalicelib/utils/helper.py +++ b/api/chalicelib/utils/helper.py @@ -143,8 +143,8 @@ def string_to_sql_like_with_op(value, op): return _value.replace("%", "%%") -likable_operators = [schemas.SearchEventOperator._starts_with, schemas.SearchEventOperator._ends_with, - schemas.SearchEventOperator._contains, schemas.SearchEventOperator._not_contains] +likable_operators = [schemas.SearchEventOperator.STARTS_WITH, schemas.SearchEventOperator.ENDS_WITH, + schemas.SearchEventOperator.CONTAINS, schemas.SearchEventOperator.NOT_CONTAINS] def is_likable(op: schemas.SearchEventOperator): @@ -162,11 +162,11 @@ def values_for_operator(value: Union[str, list], op: schemas.SearchEventOperator else: if value is None: return value - if op == schemas.SearchEventOperator._starts_with: + if op == schemas.SearchEventOperator.STARTS_WITH: return f"{value}%" - elif op == schemas.SearchEventOperator._ends_with: + elif op == schemas.SearchEventOperator.ENDS_WITH: return f"%{value}" - elif op == schemas.SearchEventOperator._contains or op == schemas.SearchEventOperator._not_contains: + elif op == schemas.SearchEventOperator.CONTAINS or op == schemas.SearchEventOperator.NOT_CONTAINS: return f"%{value}%" return value @@ -278,22 +278,22 @@ def old_search_payload_to_flat(values): def custom_alert_to_front(values): # to support frontend format for payload - if values.get("seriesId") is not None and values["query"]["left"] == schemas.AlertColumn.custom: + if values.get("seriesId") is not None and values["query"]["left"] == schemas.AlertColumn.CUSTOM: values["query"]["left"] = values["seriesId"] values["seriesId"] = None return values def __time_value(row): - row["unit"] = schemas.TemplatePredefinedUnits.millisecond + row["unit"] = schemas.TemplatePredefinedUnits.MILLISECOND factor = 1 if row["value"] > TimeUTC.MS_MINUTE: row["value"] = row["value"] / TimeUTC.MS_MINUTE - row["unit"] = schemas.TemplatePredefinedUnits.minute + row["unit"] = schemas.TemplatePredefinedUnits.MINUTE factor = TimeUTC.MS_MINUTE elif row["value"] > 1 * 1000: row["value"] = row["value"] / 1000 - row["unit"] = schemas.TemplatePredefinedUnits.second + row["unit"] = schemas.TemplatePredefinedUnits.SECOND factor = 1000 if "chart" in row and factor > 1: diff --git a/api/chalicelib/utils/sql_helper.py 
b/api/chalicelib/utils/sql_helper.py index 02744595a1..c587cfab93 100644 --- a/api/chalicelib/utils/sql_helper.py +++ b/api/chalicelib/utils/sql_helper.py @@ -5,23 +5,23 @@ def get_sql_operator(op: Union[schemas.SearchEventOperator, schemas.ClickEventExtraOperator]): return { - schemas.SearchEventOperator._is: "=", - schemas.SearchEventOperator._is_any: "IN", - schemas.SearchEventOperator._on: "=", - schemas.SearchEventOperator._on_any: "IN", - schemas.SearchEventOperator._is_not: "!=", - schemas.SearchEventOperator._not_on: "!=", - schemas.SearchEventOperator._contains: "ILIKE", - schemas.SearchEventOperator._not_contains: "NOT ILIKE", - schemas.SearchEventOperator._starts_with: "ILIKE", - schemas.SearchEventOperator._ends_with: "ILIKE", + schemas.SearchEventOperator.IS: "=", + schemas.SearchEventOperator.IS_ANY: "IN", + schemas.SearchEventOperator.ON: "=", + schemas.SearchEventOperator.ON_ANY: "IN", + schemas.SearchEventOperator.IS_NOT: "!=", + schemas.SearchEventOperator.NOT_ON: "!=", + schemas.SearchEventOperator.CONTAINS: "ILIKE", + schemas.SearchEventOperator.NOT_CONTAINS: "NOT ILIKE", + schemas.SearchEventOperator.STARTS_WITH: "ILIKE", + schemas.SearchEventOperator.ENDS_WITH: "ILIKE", }.get(op, "=") def is_negation_operator(op: schemas.SearchEventOperator): - return op in [schemas.SearchEventOperator._is_not, - schemas.SearchEventOperator._not_on, - schemas.SearchEventOperator._not_contains] + return op in [schemas.SearchEventOperator.IS_NOT, + schemas.SearchEventOperator.NOT_ON, + schemas.SearchEventOperator.NOT_CONTAINS] def reverse_sql_operator(op): @@ -46,8 +46,8 @@ def multi_values(values, value_key="value"): def isAny_opreator(op: schemas.SearchEventOperator): - return op in [schemas.SearchEventOperator._on_any, schemas.SearchEventOperator._is_any] + return op in [schemas.SearchEventOperator.ON_ANY, schemas.SearchEventOperator.IS_ANY] def isUndefined_operator(op: schemas.SearchEventOperator): - return op in [schemas.SearchEventOperator._is_undefined] + return op in [schemas.SearchEventOperator.IS_UNDEFINED] diff --git a/api/routers/core.py b/api/routers/core.py index 40bf265205..083cffc2a8 100644 --- a/api/routers/core.py +++ b/api/routers/core.py @@ -31,20 +31,20 @@ def events_search(projectId: int, q: str, if live: return assist.autocomplete(project_id=projectId, q=q, key=key if key is not None else type) - if type in [schemas.FetchFilterType._url]: - type = schemas.EventType.request - elif type in [schemas.GraphqlFilterType._name]: - type = schemas.EventType.graphql + if type in [schemas.FetchFilterType.FETCH_URL]: + type = schemas.EventType.REQUEST + elif type in [schemas.GraphqlFilterType.GRAPHQL_NAME]: + type = schemas.EventType.GRAPHQL elif isinstance(type, schemas.PerformanceEventType): - if type in [schemas.PerformanceEventType.location_dom_complete, - schemas.PerformanceEventType.location_largest_contentful_paint_time, - schemas.PerformanceEventType.location_ttfb, - schemas.PerformanceEventType.location_avg_cpu_load, - schemas.PerformanceEventType.location_avg_memory_usage + if type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE, + schemas.PerformanceEventType.LOCATION_LARGEST_CONTENTFUL_PAINT_TIME, + schemas.PerformanceEventType.LOCATION_TTFB, + schemas.PerformanceEventType.LOCATION_AVG_CPU_LOAD, + schemas.PerformanceEventType.LOCATION_AVG_MEMORY_USAGE ]: - type = schemas.EventType.location - elif type in [schemas.PerformanceEventType.fetch_failed]: - type = schemas.EventType.request + type = schemas.EventType.LOCATION + elif type in 
[schemas.PerformanceEventType.FETCH_FAILED]: + type = schemas.EventType.REQUEST else: return {"data": []} @@ -72,12 +72,12 @@ def integration_notify(projectId: int, integration: str, webhookId: int, source: "user": context.email, "comment": comment, "project_id": projectId, "integration_id": webhookId, "project_name": context.project.name} - if integration == schemas.WebhookType.slack: + if integration == schemas.WebhookType.SLACK: if source == "sessions": return Slack.share_session(session_id=sourceId, **args) elif source == "errors": return Slack.share_error(error_id=sourceId, **args) - elif integration == schemas.WebhookType.msteams: + elif integration == schemas.WebhookType.MSTEAMS: if source == "sessions": return MSTeams.share_session(session_id=sourceId, **args) elif source == "errors": @@ -711,7 +711,7 @@ def get_boarding_state_integrations(context: schemas.CurrentContext = Depends(OR @app.get('/integrations/slack/channels', tags=["integrations"]) def get_slack_channels(context: schemas.CurrentContext = Depends(OR_context)): - return {"data": webhook.get_by_type(tenant_id=context.tenant_id, webhook_type=schemas.WebhookType.slack)} + return {"data": webhook.get_by_type(tenant_id=context.tenant_id, webhook_type=schemas.WebhookType.SLACK)} @app.get('/integrations/slack/{integrationId}', tags=["integrations"]) @@ -808,7 +808,7 @@ def get_limits(context: schemas.CurrentContext = Depends(OR_context)): @app.get('/integrations/msteams/channels', tags=["integrations"]) def get_msteams_channels(context: schemas.CurrentContext = Depends(OR_context)): - return {"data": webhook.get_by_type(tenant_id=context.tenant_id, webhook_type=schemas.WebhookType.msteams)} + return {"data": webhook.get_by_type(tenant_id=context.tenant_id, webhook_type=schemas.WebhookType.MSTEAMS)} @app.post('/integrations/msteams', tags=['integrations']) diff --git a/api/schemas/schemas.py b/api/schemas/schemas.py index 296d66021b..a51a6a7a61 100644 --- a/api/schemas/schemas.py +++ b/api/schemas/schemas.py @@ -16,50 +16,50 @@ def transform_old_filter_type(cls, values): return values values["type"] = { # filters - "USEROS": FilterType.user_os.value, - "USERBROWSER": FilterType.user_browser.value, - "USERDEVICE": FilterType.user_device.value, - "USERCOUNTRY": FilterType.user_country.value, - "USERID": FilterType.user_id.value, - "USERANONYMOUSID": FilterType.user_anonymous_id.value, - "REFERRER": FilterType.referrer.value, - "REVID": FilterType.rev_id.value, - "USEROS_IOS": FilterType.user_os_mobile.value, - "USERDEVICE_IOS": FilterType.user_device_mobile.value, - "USERCOUNTRY_IOS": FilterType.user_country_mobile.value, - "USERID_IOS": FilterType.user_id_mobile.value, - "USERANONYMOUSID_IOS": FilterType.user_anonymous_id_mobile.value, - "REVID_IOS": FilterType.rev_id_mobile.value, - "DURATION": FilterType.duration.value, - "PLATFORM": FilterType.platform.value, - "METADATA": FilterType.metadata.value, - "ISSUE": FilterType.issue.value, - "EVENTS_COUNT": FilterType.events_count.value, - "UTM_SOURCE": FilterType.utm_source.value, - "UTM_MEDIUM": FilterType.utm_medium.value, - "UTM_CAMPAIGN": FilterType.utm_campaign.value, + "USEROS": FilterType.USER_OS.value, + "USERBROWSER": FilterType.USER_BROWSER.value, + "USERDEVICE": FilterType.USER_DEVICE.value, + "USERCOUNTRY": FilterType.USER_COUNTRY.value, + "USERID": FilterType.USER_ID.value, + "USERANONYMOUSID": FilterType.USER_ANONYMOUS_ID.value, + "REFERRER": FilterType.REFERRER.value, + "REVID": FilterType.REV_ID.value, + "USEROS_IOS": FilterType.USER_OS_MOBILE.value, + 
"USERDEVICE_IOS": FilterType.USER_DEVICE_MOBILE.value, + "USERCOUNTRY_IOS": FilterType.USER_COUNTRY_MOBILE.value, + "USERID_IOS": FilterType.USER_ID_MOBILE.value, + "USERANONYMOUSID_IOS": FilterType.USER_ANONYMOUS_ID_MOBILE.value, + "REVID_IOS": FilterType.REV_ID_MOBILE.value, + "DURATION": FilterType.DURATION.value, + "PLATFORM": FilterType.PLATFORM.value, + "METADATA": FilterType.METADATA.value, + "ISSUE": FilterType.ISSUE.value, + "EVENTS_COUNT": FilterType.EVENTS_COUNT.value, + "UTM_SOURCE": FilterType.UTM_SOURCE.value, + "UTM_MEDIUM": FilterType.UTM_MEDIUM.value, + "UTM_CAMPAIGN": FilterType.UTM_CAMPAIGN.value, # events: - "CLICK": EventType.click.value, - "INPUT": EventType.input.value, - "LOCATION": EventType.location.value, - "CUSTOM": EventType.custom.value, - "REQUEST": EventType.request.value, - "FETCH": EventType.request_details.value, - "GRAPHQL": EventType.graphql.value, - "STATEACTION": EventType.state_action.value, - "ERROR": EventType.error.value, - "CLICK_MOBILE": EventType.click_mobile.value, - "INPUT_MOBILE": EventType.input_mobile.value, - "VIEW_MOBILE": EventType.view_mobile.value, - "CUSTOM_MOBILE": EventType.custom_mobile.value, - "REQUEST_MOBILE": EventType.request_mobile.value, - "ERROR_MOBILE": EventType.error_mobile.value, - "DOM_COMPLETE": PerformanceEventType.location_dom_complete.value, - "LARGEST_CONTENTFUL_PAINT_TIME": PerformanceEventType.location_largest_contentful_paint_time.value, - "TTFB": PerformanceEventType.location_ttfb.value, - "AVG_CPU_LOAD": PerformanceEventType.location_avg_cpu_load.value, - "AVG_MEMORY_USAGE": PerformanceEventType.location_avg_memory_usage.value, - "FETCH_FAILED": PerformanceEventType.fetch_failed.value, + "CLICK": EventType.CLICK.value, + "INPUT": EventType.INPUT.value, + "LOCATION": EventType.LOCATION.value, + "CUSTOM": EventType.CUSTOM.value, + "REQUEST": EventType.REQUEST.value, + "FETCH": EventType.REQUEST_DETAILS.value, + "GRAPHQL": EventType.GRAPHQL.value, + "STATEACTION": EventType.STATE_ACTION.value, + "ERROR": EventType.ERROR.value, + "CLICK_MOBILE": EventType.CLICK_MOBILE.value, + "INPUT_MOBILE": EventType.INPUT_MOBILE.value, + "VIEW_MOBILE": EventType.VIEW_MOBILE.value, + "CUSTOM_MOBILE": EventType.CUSTOM_MOBILE.value, + "REQUEST_MOBILE": EventType.REQUEST_MOBILE.value, + "ERROR_MOBILE": EventType.ERROR_MOBILE.value, + "DOM_COMPLETE": PerformanceEventType.LOCATION_DOM_COMPLETE.value, + "LARGEST_CONTENTFUL_PAINT_TIME": PerformanceEventType.LOCATION_LARGEST_CONTENTFUL_PAINT_TIME.value, + "TTFB": PerformanceEventType.LOCATION_TTFB.value, + "AVG_CPU_LOAD": PerformanceEventType.LOCATION_AVG_CPU_LOAD.value, + "AVG_MEMORY_USAGE": PerformanceEventType.LOCATION_AVG_MEMORY_USAGE.value, + "FETCH_FAILED": PerformanceEventType.FETCH_FAILED.value, }.get(values["type"], values["type"]) return values @@ -371,8 +371,8 @@ class _AlertMessageSchema(BaseModel): class AlertDetectionType(str, Enum): - percent = "percent" - change = "change" + PERCENT = "percent" + CHANGE = "change" class _AlertOptionSchema(BaseModel): @@ -384,34 +384,34 @@ class _AlertOptionSchema(BaseModel): class AlertColumn(str, Enum): - performance__dom_content_loaded__average = "performance.dom_content_loaded.average" - performance__first_meaningful_paint__average = "performance.first_meaningful_paint.average" - performance__page_load_time__average = "performance.page_load_time.average" - performance__dom_build_time__average = "performance.dom_build_time.average" - performance__speed_index__average = "performance.speed_index.average" - 
performance__page_response_time__average = "performance.page_response_time.average" - performance__ttfb__average = "performance.ttfb.average" - performance__time_to_render__average = "performance.time_to_render.average" - performance__image_load_time__average = "performance.image_load_time.average" - performance__request_load_time__average = "performance.request_load_time.average" - resources__load_time__average = "resources.load_time.average" - resources__missing__count = "resources.missing.count" - errors__4xx_5xx__count = "errors.4xx_5xx.count" - errors__4xx__count = "errors.4xx.count" - errors__5xx__count = "errors.5xx.count" - errors__javascript__impacted_sessions__count = "errors.javascript.impacted_sessions.count" - performance__crashes__count = "performance.crashes.count" - errors__javascript__count = "errors.javascript.count" - errors__backend__count = "errors.backend.count" - custom = "CUSTOM" + PERFORMANCE__DOM_CONTENT_LOADED__AVERAGE = "performance.dom_content_loaded.average" + PERFORMANCE__FIRST_MEANINGFUL_PAINT__AVERAGE = "performance.first_meaningful_paint.average" + PERFORMANCE__PAGE_LOAD_TIME__AVERAGE = "performance.page_load_time.average" + PERFORMANCE__DOM_BUILD_TIME__AVERAGE = "performance.dom_build_time.average" + PERFORMANCE__SPEED_INDEX__AVERAGE = "performance.speed_index.average" + PERFORMANCE__PAGE_RESPONSE_TIME__AVERAGE = "performance.page_response_time.average" + PERFORMANCE__TTFB__AVERAGE = "performance.ttfb.average" + PERFORMANCE__TIME_TO_RENDER__AVERAGE = "performance.time_to_render.average" + PERFORMANCE__IMAGE_LOAD_TIME__AVERAGE = "performance.image_load_time.average" + PERFORMANCE__REQUEST_LOAD_TIME__AVERAGE = "performance.request_load_time.average" + RESOURCES__LOAD_TIME__AVERAGE = "resources.load_time.average" + RESOURCES__MISSING__COUNT = "resources.missing.count" + ERRORS__4XX_5XX__COUNT = "errors.4xx_5xx.count" + ERRORS__4XX__COUNT = "errors.4xx.count" + ERRORS__5XX__COUNT = "errors.5xx.count" + ERRORS__JAVASCRIPT__IMPACTED_SESSIONS__COUNT = "errors.javascript.impacted_sessions.count" + PERFORMANCE__CRASHES__COUNT = "performance.crashes.count" + ERRORS__JAVASCRIPT__COUNT = "errors.javascript.count" + ERRORS__BACKEND__COUNT = "errors.backend.count" + CUSTOM = "CUSTOM" class MathOperator(str, Enum): - _equal = "=" - _less = "<" - _greater = ">" - _less_eq = "<=" - _greater_eq = ">=" + EQUAL = "=" + LESS = "<" + GREATER = ">" + LESS_EQ = "<=" + GREATER_EQ = ">=" class _AlertQuerySchema(BaseModel): @@ -421,14 +421,14 @@ class _AlertQuerySchema(BaseModel): class AlertDetectionMethod(str, Enum): - threshold = "threshold" - change = "change" + THRESHOLD = "threshold" + CHANGE = "change" class AlertSchema(BaseModel): name: str = Field(..., pattern=NAME_PATTERN) detection_method: AlertDetectionMethod = Field(...) - change: Optional[AlertDetectionType] = Field(default=AlertDetectionType.change) + change: Optional[AlertDetectionType] = Field(default=AlertDetectionType.CHANGE) description: Optional[str] = Field(default=None) options: _AlertOptionSchema = Field(...) query: _AlertQuerySchema = Field(...) 
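
Since all of these enums subclass str, the rename above only changes the Python member names; the literal values that get persisted and sent over the wire ("=", "change", "CUSTOM", ...) are untouched. A minimal sketch of that invariant, assuming the refactored module imports as `schemas` (illustrative only, not part of the patch):

    import schemas  # assumed import path for the refactored module

    # Member names changed, but the serialized values did not:
    assert schemas.MathOperator.EQUAL.value == "="
    assert schemas.AlertDetectionType.CHANGE.value == "change"
    assert schemas.AlertColumn.CUSTOM.value == "CUSTOM"

    # str-Enum members still compare equal to the raw strings persisted
    # by older versions, so existing alert rows keep validating:
    assert schemas.AlertDetectionMethod.THRESHOLD == "threshold"
    assert schemas.AlertDetectionType.PERCENT == "percent"
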
@@ -439,7 +439,7 @@ def transform_alert(cls, values): values.series_id = None if isinstance(values.query.left, int): values.series_id = values.query.left - values.query.left = AlertColumn.custom + values.query.left = AlertColumn.CUSTOM return values @@ -449,166 +449,154 @@ class SourcemapUploadPayloadSchema(BaseModel): class ErrorSource(str, Enum): - js_exception = "js_exception" - bugsnag = "bugsnag" - cloudwatch = "cloudwatch" - datadog = "datadog" - newrelic = "newrelic" - rollbar = "rollbar" - sentry = "sentry" - stackdriver = "stackdriver" - sumologic = "sumologic" + JS_EXCEPTION = "js_exception" + BUGSNAG = "bugsnag" + CLOUDWATCH = "cloudwatch" + DATADOG = "datadog" + NEWRELIC = "newrelic" + ROLLBAR = "rollbar" + SENTRY = "sentry" + STACKDRIVER = "stackdriver" + SUMOLOGIC = "sumologic" class EventType(str, Enum): - click = "click" - input = "input" - location = "location" - custom = "custom" - request = "request" - request_details = "fetch" - graphql = "graphql" - state_action = "stateAction" - error = "error" - tag = "tag" - click_mobile = "tapIos" - input_mobile = "inputIos" - view_mobile = "viewIos" - custom_mobile = "customIos" - request_mobile = "requestIos" - error_mobile = "errorIos" - swipe_mobile = "swipeIos" + CLICK = "click" + INPUT = "input" + LOCATION = "location" + CUSTOM = "custom" + REQUEST = "request" + REQUEST_DETAILS = "fetch" + GRAPHQL = "graphql" + STATE_ACTION = "stateAction" + ERROR = "error" + TAG = "tag" + CLICK_MOBILE = "tapIos" + INPUT_MOBILE = "inputIos" + VIEW_MOBILE = "viewIos" + CUSTOM_MOBILE = "customIos" + REQUEST_MOBILE = "requestIos" + ERROR_MOBILE = "errorIos" + SWIPE_MOBILE = "swipeIos" class PerformanceEventType(str, Enum): - location_dom_complete = "domComplete" - location_largest_contentful_paint_time = "largestContentfulPaintTime" - location_ttfb = "ttfb" - location_avg_cpu_load = "avgCpuLoad" - location_avg_memory_usage = "avgMemoryUsage" - fetch_failed = "fetchFailed" + LOCATION_DOM_COMPLETE = "domComplete" + LOCATION_LARGEST_CONTENTFUL_PAINT_TIME = "largestContentfulPaintTime" + LOCATION_TTFB = "ttfb" + LOCATION_AVG_CPU_LOAD = "avgCpuLoad" + LOCATION_AVG_MEMORY_USAGE = "avgMemoryUsage" + FETCH_FAILED = "fetchFailed" # fetch_duration = "FETCH_DURATION" class FilterType(str, Enum): - user_os = "userOs" - user_browser = "userBrowser" - user_device = "userDevice" - user_country = "userCountry" - user_city = "userCity" - user_state = "userState" - user_id = "userId" - user_anonymous_id = "userAnonymousId" - referrer = "referrer" - rev_id = "revId" + USER_OS = "userOs" + USER_BROWSER = "userBrowser" + USER_DEVICE = "userDevice" + USER_COUNTRY = "userCountry" + USER_CITY = "userCity" + USER_STATE = "userState" + USER_ID = "userId" + USER_ANONYMOUS_ID = "userAnonymousId" + REFERRER = "referrer" + REV_ID = "revId" # IOS - user_os_mobile = "userOsIos" - user_device_mobile = "userDeviceIos" - user_country_mobile = "userCountryIos" - user_id_mobile = "userIdIos" - user_anonymous_id_mobile = "userAnonymousIdIos" - rev_id_mobile = "revIdIos" + USER_OS_MOBILE = "userOsIos" + USER_DEVICE_MOBILE = "userDeviceIos" + USER_COUNTRY_MOBILE = "userCountryIos" + USER_ID_MOBILE = "userIdIos" + USER_ANONYMOUS_ID_MOBILE = "userAnonymousIdIos" + REV_ID_MOBILE = "revIdIos" # - duration = "duration" - platform = "platform" - metadata = "metadata" - issue = "issue" - events_count = "eventsCount" - utm_source = "utmSource" - utm_medium = "utmMedium" - utm_campaign = "utmCampaign" + DURATION = "duration" + PLATFORM = "platform" + METADATA = "metadata" + ISSUE = "issue" + 
EVENTS_COUNT = "eventsCount" + UTM_SOURCE = "utmSource" + UTM_MEDIUM = "utmMedium" + UTM_CAMPAIGN = "utmCampaign" # Mobile conditions - thermal_state = "thermalState" - main_thread_cpu = "mainThreadCPU" - view_component = "viewComponent" - log_event = "logEvent" - click_event = "clickEvent" - memory_usage = "memoryUsage" + THERMAL_STATE = "thermalState" + MAIN_THREAD_CPU = "mainThreadCPU" + VIEW_COMPONENT = "viewComponent" + LOG_EVENT = "logEvent" + CLICK_EVENT = "clickEvent" + MEMORY_USAGE = "memoryUsage" class SearchEventOperator(str, Enum): - _is = "is" - _is_any = "isAny" - _on = "on" - _on_any = "onAny" - _is_not = "isNot" - _is_undefined = "isUndefined" - _not_on = "notOn" - _contains = "contains" - _not_contains = "notContains" - _starts_with = "startsWith" - _ends_with = "endsWith" + IS = "is" + IS_ANY = "isAny" + ON = "on" + ON_ANY = "onAny" + IS_NOT = "isNot" + IS_UNDEFINED = "isUndefined" + NOT_ON = "notOn" + CONTAINS = "contains" + NOT_CONTAINS = "notContains" + STARTS_WITH = "startsWith" + ENDS_WITH = "endsWith" class ClickEventExtraOperator(str, Enum): - _on_selector = "onSelector" + ON_SELECTOR = "onSelector" class PlatformType(str, Enum): - mobile = "mobile" - desktop = "desktop" - tablet = "tablet" + MOBILE = "mobile" + DESKTOP = "desktop" + TABLET = "tablet" class SearchEventOrder(str, Enum): - _then = "then" - _or = "or" - _and = "and" + THEN = "then" + OR = "or" + AND = "and" class IssueType(str, Enum): - click_rage = 'click_rage' - dead_click = 'dead_click' - excessive_scrolling = 'excessive_scrolling' - bad_request = 'bad_request' - missing_resource = 'missing_resource' - memory = 'memory' - cpu = 'cpu' - slow_resource = 'slow_resource' - slow_page_load = 'slow_page_load' - crash = 'crash' - custom = 'custom' - js_exception = 'js_exception' - mouse_thrashing = 'mouse_thrashing' + CLICK_RAGE = 'click_rage' + DEAD_CLICK = 'dead_click' + EXCESSIVE_SCROLLING = 'excessive_scrolling' + BAD_REQUEST = 'bad_request' + MISSING_RESOURCE = 'missing_resource' + MEMORY = 'memory' + CPU = 'cpu' + SLOW_RESOURCE = 'slow_resource' + SLOW_PAGE_LOAD = 'slow_page_load' + CRASH = 'crash' + CUSTOM = 'custom' + JS_EXCEPTION = 'js_exception' + MOUSE_THRASHING = 'mouse_thrashing' # IOS - tap_rage = 'tap_rage' + TAP_RAGE = 'tap_rage' class MetricFormatType(str, Enum): - session_count = 'sessionCount' + SESSION_COUNT = 'sessionCount' class MetricExtendedFormatType(str, Enum): - session_count = 'sessionCount' - user_count = 'userCount' - - -class HttpMethod(str, Enum): - _get = 'GET' - _head = 'HEAD' - _post = 'POST' - _put = 'PUT' - _delete = 'DELETE' - _connect = 'CONNECT' - _option = 'OPTIONS' - _trace = 'TRACE' - _patch = 'PATCH' + SESSION_COUNT = 'sessionCount' + USER_COUNT = 'userCount' class FetchFilterType(str, Enum): - _url = "fetchUrl" - _status_code = "fetchStatusCode" - _method = "fetchMethod" - _duration = "fetchDuration" - _request_body = "fetchRequestBody" - _response_body = "fetchResponseBody" + FETCH_URL = "fetchUrl" + FETCH_STATUS_CODE = "fetchStatusCode" + FETCH_METHOD = "fetchMethod" + FETCH_DURATION = "fetchDuration" + FETCH_REQUEST_BODY = "fetchRequestBody" + FETCH_RESPONSE_BODY = "fetchResponseBody" class GraphqlFilterType(str, Enum): - _name = "graphqlName" - _method = "graphqlMethod" - _request_body = "graphqlRequestBody" - _response_body = "graphqlResponseBody" + GRAPHQL_NAME = "graphqlName" + GRAPHQL_METHOD = "graphqlMethod" + GRAPHQL_REQUEST_BODY = "graphqlRequestBody" + GRAPHQL_RESPONSE_BODY = "graphqlResponseBody" class RequestGraphqlFilterSchema(BaseModel): @@ 
-633,7 +621,7 @@ class SessionSearchEventSchema2(BaseModel): @model_validator(mode='after') def event_validator(cls, values): if isinstance(values.type, PerformanceEventType): - if values.type == PerformanceEventType.fetch_failed: + if values.type == PerformanceEventType.FETCH_FAILED: return values # assert values.get("source") is not None, "source should not be null for PerformanceEventType" # assert isinstance(values["source"], list) and len(values["source"]) > 0, \ @@ -644,18 +632,18 @@ def event_validator(cls, values): assert isinstance(values.source, list), f"source of type list is required for {values.type}" for c in values["source"]: assert isinstance(c, int), f"source value should be of type int for {values.type}" - elif values.type == EventType.error and values.source is None: - values.source = [ErrorSource.js_exception] - elif values.type == EventType.request_details: + elif values.type == EventType.ERROR and values.source is None: + values.source = [ErrorSource.JS_EXCEPTION] + elif values.type == EventType.REQUEST_DETAILS: assert isinstance(values.filters, List) and len(values.filters) > 0, \ - f"filters should be defined for {EventType.request_details}" - elif values.type == EventType.graphql: + f"filters should be defined for {EventType.REQUEST_DETAILS}" + elif values.type == EventType.GRAPHQL: assert isinstance(values.filters, List) and len(values.filters) > 0, \ - f"filters should be defined for {EventType.graphql}" + f"filters should be defined for {EventType.GRAPHQL}" if isinstance(values.operator, ClickEventExtraOperator): - assert values.type == EventType.click, \ - f"operator:{values.operator} is only available for event-type: {EventType.click}" + assert values.type == EventType.CLICK, \ + f"operator:{values.operator} is only available for event-type: {EventType.CLICK}" return values @@ -684,22 +672,22 @@ def _transform_data(cls, values): @model_validator(mode='after') def filter_validator(cls, values): - if values.type == FilterType.metadata: + if values.type == FilterType.METADATA: assert values.source is not None and len(values.source) > 0, \ "must specify a valid 'source' for metadata filter" - elif values.type == FilterType.issue: + elif values.type == FilterType.ISSUE: for v in values.value: if IssueType.has_value(v): v = IssueType(v) else: raise ValueError(f"value should be of type IssueType for {values.type} filter") - elif values.type == FilterType.platform: + elif values.type == FilterType.PLATFORM: for v in values.value: if PlatformType.has_value(v): v = PlatformType(v) else: raise ValueError(f"value should be of type PlatformType for {values.type} filter") - elif values.type == FilterType.events_count: + elif values.type == FilterType.EVENTS_COUNT: if MathOperator.has_value(values.operator): values.operator = MathOperator(values.operator) else: @@ -722,8 +710,8 @@ class _PaginatedSchema(BaseModel): class SortOrderType(str, Enum): - asc = "ASC" - desc = "DESC" + ASC = "ASC" + DESC = "DESC" def add_missing_is_event(values: dict): @@ -743,8 +731,8 @@ class SessionsSearchPayloadSchema(_TimedSchema, _PaginatedSchema): events: List[SessionSearchEventSchema2] = Field(default=[], doc_hidden=True) filters: List[GroupedFilterType] = Field(default=[]) sort: str = Field(default="startTs") - order: SortOrderType = Field(default=SortOrderType.desc) - events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder._then) + order: SortOrderType = Field(default=SortOrderType.DESC) + events_order: Optional[SearchEventOrder] = Field(default=SearchEventOrder.THEN) 
group_by_user: bool = Field(default=False) bookmarked: bool = Field(default=False) @@ -754,7 +742,7 @@ def transform_order(cls, values): values["sort"] = "startTs" if values.get("order") is None: - values["order"] = SortOrderType.desc + values["order"] = SortOrderType.DESC else: values["order"] = values["order"].upper() return values @@ -802,8 +790,8 @@ def merge_identical_filters(cls, values): # ignore 'issue' type as it could be used for step-filters and tab-filters at the same time i = 0 while i < len(values): - if values[i].is_event or values[i].type == FilterType.issue: - if values[i].type == FilterType.issue: + if values[i].is_event or values[i].type == FilterType.ISSUE: + if values[i].type == FilterType.ISSUE: values[i] = remove_duplicate_values(values[i]) i += 1 continue @@ -811,7 +799,7 @@ def merge_identical_filters(cls, values): while j < len(values): if values[i].type == values[j].type \ and values[i].operator == values[j].operator \ - and (values[i].type != FilterType.metadata or values[i].source == values[j].source): + and (values[i].type != FilterType.METADATA or values[i].source == values[j].source): values[i].value += values[j].value del values[j] else: @@ -823,30 +811,30 @@ def merge_identical_filters(cls, values): class ErrorStatus(str, Enum): - all = 'all' - unresolved = 'unresolved' - resolved = 'resolved' - ignored = 'ignored' + ALL = 'all' + UNRESOLVED = 'unresolved' + RESOLVED = 'resolved' + IGNORED = 'ignored' class ErrorSort(str, Enum): - occurrence = 'occurrence' - users_count = 'users' - sessions_count = 'sessions' + OCCURRENCE = 'occurrence' + USERS_COUNT = 'users' + SESSIONS_COUNT = 'sessions' class SearchErrorsSchema(SessionsSearchPayloadSchema): - sort: ErrorSort = Field(default=ErrorSort.occurrence) + sort: ErrorSort = Field(default=ErrorSort.OCCURRENCE) density: Optional[int] = Field(default=7) - status: Optional[ErrorStatus] = Field(default=ErrorStatus.all) + status: Optional[ErrorStatus] = Field(default=ErrorStatus.ALL) query: Optional[str] = Field(default=None) class ProductAnalyticsSelectedEventType(str, Enum): - click = EventType.click.value - input = EventType.input.value - location = EventType.location.value - custom_event = EventType.custom.value + CLICK = EventType.CLICK.value + INPUT = EventType.INPUT.value + LOCATION = EventType.LOCATION.value + CUSTOM_EVENT = EventType.CUSTOM.value class PathAnalysisSubFilterSchema(BaseModel): @@ -903,7 +891,7 @@ class MobileSignPayloadSchema(BaseModel): class CardSeriesFilterSchema(SearchErrorsSchema): sort: Optional[str] = Field(default=None) - order: SortOrderType = Field(default=SortOrderType.desc) + order: SortOrderType = Field(default=SortOrderType.DESC) group_by_user: Literal[False] = False @@ -915,122 +903,122 @@ class CardSeriesSchema(BaseModel): class MetricTimeseriesViewType(str, Enum): - line_chart = "lineChart" - area_chart = "areaChart" + LINE_CHART = "lineChart" + AREA_CHART = "areaChart" class MetricTableViewType(str, Enum): - table = "table" + TABLE = "table" class MetricOtherViewType(str, Enum): - other_chart = "chart" - list_chart = "list" + OTHER_CHART = "chart" + LIST_CHART = "list" class MetricType(str, Enum): - timeseries = "timeseries" - table = "table" - funnel = "funnel" - errors = "errors" - performance = "performance" - resources = "resources" - web_vital = "webVitals" - pathAnalysis = "pathAnalysis" - retention = "retention" - stickiness = "stickiness" - heat_map = "heatMap" - insights = "insights" + TIMESERIES = "timeseries" + TABLE = "table" + FUNNEL = "funnel" + ERRORS = "errors" 
+ PERFORMANCE = "performance" + RESOURCES = "resources" + WEB_VITAL = "webVitals" + PATH_ANALYSIS = "pathAnalysis" + RETENTION = "retention" + STICKINESS = "stickiness" + HEAT_MAP = "heatMap" + INSIGHTS = "insights" class MetricOfErrors(str, Enum): - calls_errors = "callsErrors" - domains_errors_4xx = "domainsErrors4xx" - domains_errors_5xx = "domainsErrors5xx" - errors_per_domains = "errorsPerDomains" - errors_per_type = "errorsPerType" - impacted_sessions_by_js_errors = "impactedSessionsByJsErrors" - resources_by_party = "resourcesByParty" + CALLS_ERRORS = "callsErrors" + DOMAINS_ERRORS_4XX = "domainsErrors4xx" + DOMAINS_ERRORS_5XX = "domainsErrors5xx" + ERRORS_PER_DOMAINS = "errorsPerDomains" + ERRORS_PER_TYPE = "errorsPerType" + IMPACTED_SESSIONS_BY_JS_ERRORS = "impactedSessionsByJsErrors" + RESOURCES_BY_PARTY = "resourcesByParty" class MetricOfPerformance(str, Enum): - cpu = "cpu" - crashes = "crashes" - fps = "fps" - impacted_sessions_by_slow_pages = "impactedSessionsBySlowPages" - memory_consumption = "memoryConsumption" - pages_dom_buildtime = "pagesDomBuildtime" - pages_response_time = "pagesResponseTime" - pages_response_time_distribution = "pagesResponseTimeDistribution" - resources_vs_visually_complete = "resourcesVsVisuallyComplete" - sessions_per_browser = "sessionsPerBrowser" - slowest_domains = "slowestDomains" - speed_location = "speedLocation" - time_to_render = "timeToRender" + CPU = "cpu" + CRASHES = "crashes" + FPS = "fps" + IMPACTED_SESSIONS_BY_SLOW_PAGES = "impactedSessionsBySlowPages" + MEMORY_CONSUMPTION = "memoryConsumption" + PAGES_DOM_BUILDTIME = "pagesDomBuildtime" + PAGES_RESPONSE_TIME = "pagesResponseTime" + PAGES_RESPONSE_TIME_DISTRIBUTION = "pagesResponseTimeDistribution" + RESOURCES_VS_VISUALLY_COMPLETE = "resourcesVsVisuallyComplete" + SESSIONS_PER_BROWSER = "sessionsPerBrowser" + SLOWEST_DOMAINS = "slowestDomains" + SPEED_LOCATION = "speedLocation" + TIME_TO_RENDER = "timeToRender" class MetricOfResources(str, Enum): - missing_resources = "missingResources" - resources_count_by_type = "resourcesCountByType" - resources_loading_time = "resourcesLoadingTime" - resource_type_vs_response_end = "resourceTypeVsResponseEnd" - slowest_resources = "slowestResources" + MISSING_RESOURCES = "missingResources" + RESOURCES_COUNT_BY_TYPE = "resourcesCountByType" + RESOURCES_LOADING_TIME = "resourcesLoadingTime" + RESOURCE_TYPE_VS_RESPONSE_END = "resourceTypeVsResponseEnd" + SLOWEST_RESOURCES = "slowestResources" class MetricOfWebVitals(str, Enum): - avg_cpu = "avgCpu" - avg_dom_content_loaded = "avgDomContentLoaded" - avg_dom_content_load_start = "avgDomContentLoadStart" - avg_first_contentful_pixel = "avgFirstContentfulPixel" - avg_first_paint = "avgFirstPaint" - avg_fps = "avgFps" - avg_image_load_time = "avgImageLoadTime" - avg_page_load_time = "avgPageLoadTime" - avg_pages_dom_buildtime = "avgPagesDomBuildtime" - avg_pages_response_time = "avgPagesResponseTime" - avg_request_load_time = "avgRequestLoadTime" - avg_response_time = "avgResponseTime" - avg_session_duration = "avgSessionDuration" - avg_till_first_byte = "avgTillFirstByte" - avg_time_to_interactive = "avgTimeToInteractive" - avg_time_to_render = "avgTimeToRender" - avg_used_js_heap_size = "avgUsedJsHeapSize" - avg_visited_pages = "avgVisitedPages" - count_requests = "countRequests" - count_sessions = "countSessions" - count_users = "countUsers" + AVG_CPU = "avgCpu" + AVG_DOM_CONTENT_LOADED = "avgDomContentLoaded" + AVG_DOM_CONTENT_LOAD_START = "avgDomContentLoadStart" + AVG_FIRST_CONTENTFUL_PIXEL = 
"avgFirstContentfulPixel" + AVG_FIRST_PAINT = "avgFirstPaint" + AVG_FPS = "avgFps" + AVG_IMAGE_LOAD_TIME = "avgImageLoadTime" + AVG_PAGE_LOAD_TIME = "avgPageLoadTime" + AVG_PAGES_DOM_BUILDTIME = "avgPagesDomBuildtime" + AVG_PAGES_RESPONSE_TIME = "avgPagesResponseTime" + AVG_REQUEST_LOAD_TIME = "avgRequestLoadTime" + AVG_RESPONSE_TIME = "avgResponseTime" + AVG_SESSION_DURATION = "avgSessionDuration" + AVG_TILL_FIRST_BYTE = "avgTillFirstByte" + AVG_TIME_TO_INTERACTIVE = "avgTimeToInteractive" + AVG_TIME_TO_RENDER = "avgTimeToRender" + AVG_USED_JS_HEAP_SIZE = "avgUsedJsHeapSize" + AVG_VISITED_PAGES = "avgVisitedPages" + COUNT_REQUESTS = "countRequests" + COUNT_SESSIONS = "countSessions" + COUNT_USERS = "countUsers" class MetricOfTable(str, Enum): - user_os = FilterType.user_os.value - user_browser = FilterType.user_browser.value - user_device = FilterType.user_device.value - user_country = FilterType.user_country.value + USER_OS = FilterType.USER_OS.value + USER_BROWSER = FilterType.USER_BROWSER.value + USER_DEVICE = FilterType.USER_DEVICE.value + USER_COUNTRY = FilterType.USER_COUNTRY.value # user_city = FilterType.user_city.value # user_state = FilterType.user_state.value - user_id = FilterType.user_id.value - issues = FilterType.issue.value - visited_url = "location" - sessions = "sessions" - errors = "jsException" + USER_ID = FilterType.USER_ID.value + ISSUES = FilterType.ISSUE.value + VISITED_URL = "location" + SESSIONS = "sessions" + ERRORS = "jsException" class MetricOfTimeseries(str, Enum): - session_count = "sessionCount" - user_count = "userCount" + SESSION_COUNT = "sessionCount" + USER_COUNT = "userCount" class MetricOfFunnels(str, Enum): - session_count = MetricOfTimeseries.session_count.value - user_count = MetricOfTimeseries.user_count.value + SESSION_COUNT = MetricOfTimeseries.SESSION_COUNT.value + USER_COUNT = MetricOfTimeseries.USER_COUNT.value class MetricOfHeatMap(str, Enum): - heat_map_url = "heatMapUrl" + HEAT_MAP_URL = "heatMapUrl" class MetricOfPathAnalysis(str, Enum): - session_count = MetricOfTimeseries.session_count.value + session_count = MetricOfTimeseries.SESSION_COUNT.value # class CardSessionsSchema(SessionsSearchPayloadSchema): @@ -1133,13 +1121,13 @@ class __CardSchema(CardSessionsSchema): @computed_field @property def is_predefined(self) -> bool: - return self.metric_type in [MetricType.errors, MetricType.performance, - MetricType.resources, MetricType.web_vital] + return self.metric_type in [MetricType.ERRORS, MetricType.PERFORMANCE, + MetricType.RESOURCES, MetricType.WEB_VITAL] class CardTimeSeries(__CardSchema): - metric_type: Literal[MetricType.timeseries] - metric_of: MetricOfTimeseries = Field(default=MetricOfTimeseries.session_count) + metric_type: Literal[MetricType.TIMESERIES] + metric_of: MetricOfTimeseries = Field(default=MetricOfTimeseries.SESSION_COUNT) view_type: MetricTimeseriesViewType @model_validator(mode="before") @@ -1154,14 +1142,14 @@ def __transform(cls, values): class CardTable(__CardSchema): - metric_type: Literal[MetricType.table] - metric_of: MetricOfTable = Field(default=MetricOfTable.user_id) + metric_type: Literal[MetricType.TABLE] + metric_of: MetricOfTable = Field(default=MetricOfTable.USER_ID) view_type: MetricTableViewType = Field(...) 
- metric_format: MetricExtendedFormatType = Field(default=MetricExtendedFormatType.session_count) + metric_format: MetricExtendedFormatType = Field(default=MetricExtendedFormatType.SESSION_COUNT) @model_validator(mode="before") def __enforce_default(cls, values): - if values.get("metricOf") is not None and values.get("metricOf") != MetricOfTable.issues: + if values.get("metricOf") is not None and values.get("metricOf") != MetricOfTable.ISSUES: values["metricValue"] = [] return values @@ -1172,24 +1160,24 @@ def __transform(cls, values): @model_validator(mode="after") def __validator(cls, values): - if values.metric_of not in (MetricOfTable.issues, MetricOfTable.user_browser, - MetricOfTable.user_device, MetricOfTable.user_country, - MetricOfTable.visited_url): - assert values.metric_format == MetricExtendedFormatType.session_count, \ - f'metricFormat:{MetricExtendedFormatType.user_count.value} is not supported for this metricOf' + if values.metric_of not in (MetricOfTable.ISSUES, MetricOfTable.USER_BROWSER, + MetricOfTable.USER_DEVICE, MetricOfTable.USER_COUNTRY, + MetricOfTable.VISITED_URL): + assert values.metric_format == MetricExtendedFormatType.SESSION_COUNT, \ + f'metricFormat:{MetricExtendedFormatType.USER_COUNT.value} is not supported for this metricOf' return values class CardFunnel(__CardSchema): - metric_type: Literal[MetricType.funnel] - metric_of: MetricOfFunnels = Field(default=MetricOfFunnels.session_count) + metric_type: Literal[MetricType.FUNNEL] + metric_of: MetricOfFunnels = Field(default=MetricOfFunnels.SESSION_COUNT) view_type: MetricOtherViewType = Field(...) @model_validator(mode="before") def __enforce_default(cls, values): if values.get("metricOf") and not MetricOfFunnels.has_value(values["metricOf"]): - values["metricOf"] = MetricOfFunnels.session_count - values["viewType"] = MetricOtherViewType.other_chart + values["metricOf"] = MetricOfFunnels.SESSION_COUNT + values["viewType"] = MetricOtherViewType.OTHER_CHART if values.get("series") is not None and len(values["series"]) > 0: values["series"] = [values["series"][0]] return values @@ -1201,8 +1189,8 @@ def __transform(cls, values): class CardErrors(__CardSchema): - metric_type: Literal[MetricType.errors] - metric_of: MetricOfErrors = Field(default=MetricOfErrors.impacted_sessions_by_js_errors) + metric_type: Literal[MetricType.ERRORS] + metric_of: MetricOfErrors = Field(default=MetricOfErrors.IMPACTED_SESSIONS_BY_JS_ERRORS) view_type: MetricOtherViewType = Field(...) @model_validator(mode="before") @@ -1217,8 +1205,8 @@ def __transform(cls, values): class CardPerformance(__CardSchema): - metric_type: Literal[MetricType.performance] - metric_of: MetricOfPerformance = Field(default=MetricOfPerformance.cpu) + metric_type: Literal[MetricType.PERFORMANCE] + metric_of: MetricOfPerformance = Field(default=MetricOfPerformance.CPU) view_type: MetricOtherViewType = Field(...) @model_validator(mode="before") @@ -1233,8 +1221,8 @@ def __transform(cls, values): class CardResources(__CardSchema): - metric_type: Literal[MetricType.resources] - metric_of: MetricOfResources = Field(default=MetricOfResources.missing_resources) + metric_type: Literal[MetricType.RESOURCES] + metric_of: MetricOfResources = Field(default=MetricOfResources.MISSING_RESOURCES) view_type: MetricOtherViewType = Field(...) 
@model_validator(mode="before") @@ -1249,8 +1237,8 @@ def __transform(cls, values): class CardWebVital(__CardSchema): - metric_type: Literal[MetricType.web_vital] - metric_of: MetricOfWebVitals = Field(default=MetricOfWebVitals.avg_cpu) + metric_type: Literal[MetricType.WEB_VITAL] + metric_of: MetricOfWebVitals = Field(default=MetricOfWebVitals.AVG_CPU) view_type: MetricOtherViewType = Field(...) @model_validator(mode="before") @@ -1265,8 +1253,8 @@ def __transform(cls, values): class CardHeatMap(__CardSchema): - metric_type: Literal[MetricType.heat_map] - metric_of: MetricOfHeatMap = Field(default=MetricOfHeatMap.heat_map_url) + metric_type: Literal[MetricType.HEAT_MAP] + metric_of: MetricOfHeatMap = Field(default=MetricOfHeatMap.HEAT_MAP_URL) view_type: MetricOtherViewType = Field(...) @model_validator(mode="before") @@ -1280,17 +1268,17 @@ def __transform(cls, values): class MetricOfInsights(str, Enum): - issue_categories = "issueCategories" + ISSUE_CATEGORIES = "issueCategories" class CardInsights(__CardSchema): - metric_type: Literal[MetricType.insights] - metric_of: MetricOfInsights = Field(default=MetricOfInsights.issue_categories) + metric_type: Literal[MetricType.INSIGHTS] + metric_of: MetricOfInsights = Field(default=MetricOfInsights.ISSUE_CATEGORIES) view_type: MetricOtherViewType = Field(...) @model_validator(mode="before") def __enforce_default(cls, values): - values["view_type"] = MetricOtherViewType.list_chart + values["view_type"] = MetricOtherViewType.LIST_CHART return values @model_validator(mode="after") @@ -1300,7 +1288,7 @@ def __transform(cls, values): @model_validator(mode='after') def restrictions(cls, values): - raise ValueError(f"metricType:{MetricType.insights} not supported yet.") + raise ValueError(f"metricType:{MetricType.INSIGHTS} not supported yet.") class CardPathAnalysisSeriesSchema(CardSeriesSchema): @@ -1318,7 +1306,7 @@ def __enforce_default(cls, values): class CardPathAnalysis(__CardSchema): - metric_type: Literal[MetricType.pathAnalysis] + metric_type: Literal[MetricType.PATH_ANALYSIS] metric_of: MetricOfPathAnalysis = Field(default=MetricOfPathAnalysis.session_count) view_type: MetricOtherViewType = Field(...) 
metric_value: List[ProductAnalyticsSelectedEventType] = Field(default=[]) @@ -1332,7 +1320,7 @@ class CardPathAnalysis(__CardSchema): @model_validator(mode="before") def __enforce_default(cls, values): - values["viewType"] = MetricOtherViewType.other_chart.value + values["viewType"] = MetricOtherViewType.OTHER_CHART.value if values.get("series") is not None and len(values["series"]) > 0: values["series"] = [values["series"][0]] return values @@ -1424,45 +1412,45 @@ class AddWidgetToDashboardPayloadSchema(UpdateWidgetPayloadSchema): class TemplatePredefinedUnits(str, Enum): - millisecond = "ms" - second = "s" - minute = "min" - memory = "mb" - frame = "f/s" - percentage = "%" - count = "count" + MILLISECOND = "ms" + SECOND = "s" + MINUTE = "min" + MEMORY = "mb" + FRAME = "f/s" + PERCENTAGE = "%" + COUNT = "count" class LiveFilterType(str, Enum): - user_os = FilterType.user_os.value - user_browser = FilterType.user_browser.value - user_device = FilterType.user_device.value - user_country = FilterType.user_country.value - user_id = FilterType.user_id.value - user_anonymous_id = FilterType.user_anonymous_id.value - rev_id = FilterType.rev_id.value - platform = FilterType.platform.value - page_title = "pageTitle" - session_id = "sessionId" - metadata = FilterType.metadata.value - user_UUID = "userUuid" - tracker_version = "trackerVersion" - user_browser_version = "userBrowserVersion" - user_device_type = "userDeviceType" + USER_OS = FilterType.USER_OS.value + USER_BROWSER = FilterType.USER_BROWSER.value + USER_DEVICE = FilterType.USER_DEVICE.value + USER_COUNTRY = FilterType.USER_COUNTRY.value + USER_ID = FilterType.USER_ID.value + USER_ANONYMOUS_ID = FilterType.USER_ANONYMOUS_ID.value + REV_ID = FilterType.REV_ID.value + PLATFORM = FilterType.PLATFORM.value + PAGE_TITLE = "pageTitle" + SESSION_ID = "sessionId" + METADATA = FilterType.METADATA.value + USER_UUID = "userUuid" + TRACKER_VERSION = "trackerVersion" + USER_BROWSER_VERSION = "userBrowserVersion" + USER_DEVICE_TYPE = "userDeviceType" class LiveSessionSearchFilterSchema(BaseModel): value: Union[List[str], str] = Field(...) type: LiveFilterType = Field(...) 
source: Optional[str] = Field(default=None) - operator: Literal[SearchEventOperator._is, \ - SearchEventOperator._contains] = Field(default=SearchEventOperator._contains) + operator: Literal[SearchEventOperator.IS, \ + SearchEventOperator.CONTAINS] = Field(default=SearchEventOperator.CONTAINS) transform = model_validator(mode='before')(transform_old_filter_type) @model_validator(mode='after') def __validator(cls, values): - if values.type is not None and values.type == LiveFilterType.metadata: + if values.type is not None and values.type == LiveFilterType.METADATA: assert values.source is not None, "source should not be null for METADATA type" assert len(values.source) > 0, "source should not be empty for METADATA type" return values @@ -1471,7 +1459,7 @@ def __validator(cls, values): class LiveSessionsSearchPayloadSchema(_PaginatedSchema): filters: List[LiveSessionSearchFilterSchema] = Field([]) sort: Union[LiveFilterType, str] = Field(default="TIMESTAMP") - order: SortOrderType = Field(default=SortOrderType.desc) + order: SortOrderType = Field(default=SortOrderType.DESC) @model_validator(mode="before") def __transform(cls, values): @@ -1485,8 +1473,8 @@ def __transform(cls, values): else: i += 1 for i in values["filters"]: - if i.get("type") == LiveFilterType.platform: - i["type"] = LiveFilterType.user_device_type + if i.get("type") == LiveFilterType.PLATFORM: + i["type"] = LiveFilterType.USER_DEVICE_TYPE if values.get("sort") is not None: if values["sort"].lower() == "startts": values["sort"] = "TIMESTAMP" @@ -1494,24 +1482,24 @@ def __transform(cls, values): class IntegrationType(str, Enum): - github = "GITHUB" - jira = "JIRA" - slack = "SLACK" - ms_teams = "MSTEAMS" - sentry = "SENTRY" - bugsnag = "BUGSNAG" - rollbar = "ROLLBAR" - elasticsearch = "ELASTICSEARCH" - datadog = "DATADOG" - sumologic = "SUMOLOGIC" - stackdriver = "STACKDRIVER" - cloudwatch = "CLOUDWATCH" - newrelic = "NEWRELIC" + GITHUB = "GITHUB" + JIRA = "JIRA" + SLACK = "SLACK" + MS_TEAMS = "MSTEAMS" + SENTRY = "SENTRY" + BUGSNAG = "BUGSNAG" + ROLLBAR = "ROLLBAR" + ELASTICSEARCH = "ELASTICSEARCH" + DATADOG = "DATADOG" + SUMOLOGIC = "SUMOLOGIC" + STACKDRIVER = "STACKDRIVER" + CLOUDWATCH = "CLOUDWATCH" + NEWRELIC = "NEWRELIC" class SearchNoteSchema(_PaginatedSchema): sort: str = Field(default="createdAt") - order: SortOrderType = Field(default=SortOrderType.desc) + order: SortOrderType = Field(default=SortOrderType.DESC) tags: Optional[List[str]] = Field(default=[]) shared_only: bool = Field(default=False) mine_only: bool = Field(default=False) @@ -1536,21 +1524,21 @@ def __validator(cls, values): class WebhookType(str, Enum): - webhook = "webhook" - slack = "slack" - email = "email" - msteams = "msteams" + WEBHOOK = "webhook" + SLACK = "slack" + EMAIL = "email" + MSTEAMS = "msteams" class SearchCardsSchema(_PaginatedSchema): - order: SortOrderType = Field(default=SortOrderType.desc) + order: SortOrderType = Field(default=SortOrderType.DESC) shared_only: bool = Field(default=False) mine_only: bool = Field(default=False) query: Optional[str] = Field(default=None) class _HeatMapSearchEventRaw(SessionSearchEventSchema2): - type: Literal[EventType.location] = Field(...) + type: Literal[EventType.LOCATION] = Field(...) 
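
The same reasoning covers the Literal[...] annotations pinned to renamed members: pydantic validates them against the enum values, so legacy payloads sending "contains" or "userBrowser" should still parse, while out-of-range values stay rejected. A short sketch under the same `schemas` import assumption (illustrative only):

    import pydantic
    import schemas  # assumed import path, as above

    # Old wire values still coerce to the renamed members:
    f = schemas.LiveSessionSearchFilterSchema(value=["Chrome"],
                                              type="userBrowser",
                                              operator="contains")
    assert f.operator == schemas.SearchEventOperator.CONTAINS

    # Values outside Literal[IS, CONTAINS] are rejected as before:
    try:
        schemas.LiveSessionSearchFilterSchema(value=["Chrome"],
                                              type="userBrowser",
                                              operator="isNot")
    except pydantic.ValidationError:
        pass  # expected: only "is" and "contains" are accepted here
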
class HeatMapSessionsSearch(SessionsSearchPayloadSchema): @@ -1560,18 +1548,18 @@ class HeatMapSessionsSearch(SessionsSearchPayloadSchema): @model_validator(mode="before") def __transform(cls, values): for f in values.get("filters", []): - if f.get("type") == FilterType.duration: + if f.get("type") == FilterType.DURATION: return values values["filters"] = values.get("filters", []) - values["filters"].append({"value": [5000], "type": FilterType.duration, - "operator": SearchEventOperator._is, "filters": []}) + values["filters"].append({"value": [5000], "type": FilterType.DURATION, + "operator": SearchEventOperator.IS, "filters": []}) return values class HeatMapFilterSchema(BaseModel): - value: List[Literal[IssueType.click_rage, IssueType.dead_click]] = Field(default=[]) - type: Literal[FilterType.issue] = Field(...) - operator: Literal[SearchEventOperator._is, MathOperator._equal] = Field(...) + value: List[Literal[IssueType.CLICK_RAGE, IssueType.DEAD_CLICK]] = Field(default=[]) + type: Literal[FilterType.ISSUE] = Field(...) + operator: Literal[SearchEventOperator.IS, MathOperator.EQUAL] = Field(...) class GetHeatMapPayloadSchema(_TimedSchema): @@ -1616,14 +1604,14 @@ class FeatureFlagCondition(BaseModel): class SearchFlagsSchema(_PaginatedSchema): limit: int = Field(default=15, gt=0, le=200) user_id: Optional[int] = Field(default=None) - order: SortOrderType = Field(default=SortOrderType.desc) + order: SortOrderType = Field(default=SortOrderType.DESC) query: Optional[str] = Field(default=None) is_active: Optional[bool] = Field(default=None) class FeatureFlagType(str, Enum): - single_variant = "single" - multi_variant = "multi" + SINGLE_VARIANT = "single" + MULTI_VARIANT = "multi" class FeatureFlagStatus(BaseModel): @@ -1634,7 +1622,7 @@ class FeatureFlagSchema(BaseModel): payload: Optional[str] = Field(default=None) flag_key: str = Field(..., pattern=r'^[a-zA-Z0-9\-]+$') description: Optional[str] = Field(default=None) - flag_type: FeatureFlagType = Field(default=FeatureFlagType.single_variant) + flag_type: FeatureFlagType = Field(default=FeatureFlagType.SINGLE_VARIANT) is_persist: Optional[bool] = Field(default=False) is_active: Optional[bool] = Field(default=True) conditions: List[FeatureFlagCondition] = Field(default=[], min_length=1) diff --git a/api/test/test_feature_flag.py b/api/test/test_feature_flag.py index 6f2a864f75..c368e8a1b1 100644 --- a/api/test/test_feature_flag.py +++ b/api/test/test_feature_flag.py @@ -107,7 +107,7 @@ def test_search_flags_schema_validation(self): schemas.SearchFlagsSchema( limit=15, user_id=123, - order=schemas.SortOrderType.desc, + order=schemas.SortOrderType.DESC, query="search term", is_active=True ) diff --git a/ee/api/chalicelib/core/alerts_processor.py b/ee/api/chalicelib/core/alerts_processor.py index e442828466..915766b9f7 100644 --- a/ee/api/chalicelib/core/alerts_processor.py +++ b/ee/api/chalicelib/core/alerts_processor.py @@ -18,60 +18,60 @@ logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO)) LeftToDb = { - schemas.AlertColumn.performance__dom_content_loaded__average: { + schemas.AlertColumn.PERFORMANCE__DOM_CONTENT_LOADED__AVERAGE: { "table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "COALESCE(AVG(NULLIF(dom_content_loaded_time ,0)),0)"}, - schemas.AlertColumn.performance__first_meaningful_paint__average: { + schemas.AlertColumn.PERFORMANCE__FIRST_MEANINGFUL_PAINT__AVERAGE: { "table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": 
"COALESCE(AVG(NULLIF(first_contentful_paint_time,0)),0)"}, - schemas.AlertColumn.performance__page_load_time__average: { + schemas.AlertColumn.PERFORMANCE__PAGE_LOAD_TIME__AVERAGE: { "table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(load_time ,0))"}, - schemas.AlertColumn.performance__dom_build_time__average: { + schemas.AlertColumn.PERFORMANCE__DOM_BUILD_TIME__AVERAGE: { "table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(dom_building_time,0))"}, - schemas.AlertColumn.performance__speed_index__average: { + schemas.AlertColumn.PERFORMANCE__SPEED_INDEX__AVERAGE: { "table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(speed_index,0))"}, - schemas.AlertColumn.performance__page_response_time__average: { + schemas.AlertColumn.PERFORMANCE__PAGE_RESPONSE_TIME__AVERAGE: { "table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(response_time,0))"}, - schemas.AlertColumn.performance__ttfb__average: { + schemas.AlertColumn.PERFORMANCE__TTFB__AVERAGE: { "table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(first_paint_time,0))"}, - schemas.AlertColumn.performance__time_to_render__average: { + schemas.AlertColumn.PERFORMANCE__TIME_TO_RENDER__AVERAGE: { "table": "events.pages INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(visually_complete,0))"}, - schemas.AlertColumn.performance__image_load_time__average: { + schemas.AlertColumn.PERFORMANCE__IMAGE_LOAD_TIME__AVERAGE: { "table": "events.resources INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='img'"}, - schemas.AlertColumn.performance__request_load_time__average: { + schemas.AlertColumn.PERFORMANCE__REQUEST_LOAD_TIME__AVERAGE: { "table": "events.resources INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='fetch'"}, - schemas.AlertColumn.resources__load_time__average: { + schemas.AlertColumn.RESOURCES__LOAD_TIME__AVERAGE: { "table": "events.resources INNER JOIN public.sessions USING(session_id)", "formula": "AVG(NULLIF(resources.duration,0))"}, - schemas.AlertColumn.resources__missing__count: { + schemas.AlertColumn.RESOURCES__MISSING__COUNT: { "table": "events.resources INNER JOIN public.sessions USING(session_id)", "formula": "COUNT(DISTINCT url_hostpath)", "condition": "success= FALSE AND type='img'"}, - schemas.AlertColumn.errors__4xx_5xx__count: { + schemas.AlertColumn.ERRORS__4XX_5XX__COUNT: { "table": "events.resources INNER JOIN public.sessions USING(session_id)", "formula": "COUNT(session_id)", "condition": "status/100!=2"}, - schemas.AlertColumn.errors__4xx__count: { + schemas.AlertColumn.ERRORS__4XX__COUNT: { "table": "events.resources INNER JOIN public.sessions USING(session_id)", "formula": "COUNT(session_id)", "condition": "status/100=4"}, - schemas.AlertColumn.errors__5xx__count: { + schemas.AlertColumn.ERRORS__5XX__COUNT: { "table": "events.resources INNER JOIN public.sessions USING(session_id)", "formula": "COUNT(session_id)", "condition": "status/100=5"}, - schemas.AlertColumn.errors__javascript__impacted_sessions__count: { + schemas.AlertColumn.ERRORS__JAVASCRIPT__IMPACTED_SESSIONS__COUNT: { "table": "events.resources INNER JOIN public.sessions USING(session_id)", "formula": "COUNT(DISTINCT session_id)", "condition": "success= FALSE AND type='script'"}, - 
schemas.AlertColumn.performance__crashes__count: { + schemas.AlertColumn.PERFORMANCE__CRASHES__COUNT: { "table": "public.sessions", "formula": "COUNT(DISTINCT session_id)", "condition": "errors_count > 0 AND duration>0"}, - schemas.AlertColumn.errors__javascript__count: { + schemas.AlertColumn.ERRORS__JAVASCRIPT__COUNT: { "table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)", "formula": "COUNT(DISTINCT session_id)", "condition": "source='js_exception'", "joinSessions": False}, - schemas.AlertColumn.errors__backend__count: { + schemas.AlertColumn.ERRORS__BACKEND__COUNT: { "table": "events.errors INNER JOIN public.errors AS m_errors USING (error_id)", "formula": "COUNT(DISTINCT session_id)", "condition": "source!='js_exception'", "joinSessions": False}, } @@ -91,7 +91,7 @@ def can_check(a) -> bool: now = TimeUTC.now() repetitionBase = a["options"]["currentPeriod"] \ - if a["detectionMethod"] == schemas.AlertDetectionMethod.change \ + if a["detectionMethod"] == schemas.AlertDetectionMethod.CHANGE \ and a["options"]["currentPeriod"] > a["options"]["previousPeriod"] \ else a["options"]["previousPeriod"] @@ -114,7 +114,7 @@ def Build(a): main_table = "" if a["seriesId"] is not None: a["filter"]["sort"] = "session_id" - a["filter"]["order"] = schemas.SortOrderType.desc + a["filter"]["order"] = schemas.SortOrderType.DESC a["filter"]["startDate"] = 0 a["filter"]["endDate"] = TimeUTC.now() try: @@ -140,7 +140,7 @@ def Build(a): is_ss = main_table == "public.sessions" q = f"""SELECT coalesce(value,0) AS value, coalesce(value,0) {a["query"]["operator"]} {a["query"]["right"]} AS valid""" - if a["detectionMethod"] == schemas.AlertDetectionMethod.threshold: + if a["detectionMethod"] == schemas.AlertDetectionMethod.THRESHOLD: if a["seriesId"] is not None: q += f""" FROM ({subQ}) AS stat""" else: @@ -148,7 +148,7 @@ def Build(a): {"AND start_ts >= %(startDate)s AND start_ts <= %(now)s" if j_s else ""}) AS stat""" params = {**params, **full_args, "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000} else: - if a["change"] == schemas.AlertDetectionType.change: + if a["change"] == schemas.AlertDetectionType.CHANGE: if a["seriesId"] is not None: sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s") sub1 = f"SELECT (({subQ})-({sub2})) AS value" diff --git a/ee/api/chalicelib/core/alerts_processor_exp.py b/ee/api/chalicelib/core/alerts_processor_exp.py index dba05fc412..6cc73fd756 100644 --- a/ee/api/chalicelib/core/alerts_processor_exp.py +++ b/ee/api/chalicelib/core/alerts_processor_exp.py @@ -13,101 +13,101 @@ logging.basicConfig(level=config("LOGLEVEL", default=logging.INFO)) LeftToDb = { - schemas.AlertColumn.performance__dom_content_loaded__average: { + schemas.AlertColumn.PERFORMANCE__DOM_CONTENT_LOADED__AVERAGE: { "table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS pages", "formula": "COALESCE(AVG(NULLIF(dom_content_loaded_event_time ,0)),0)", "eventType": "LOCATION" }, - schemas.AlertColumn.performance__first_meaningful_paint__average: { + schemas.AlertColumn.PERFORMANCE__FIRST_MEANINGFUL_PAINT__AVERAGE: { "table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS pages", "formula": "COALESCE(AVG(NULLIF(first_contentful_paint_time,0)),0)", "eventType": "LOCATION" }, - schemas.AlertColumn.performance__page_load_time__average: { + schemas.AlertColumn.PERFORMANCE__PAGE_LOAD_TIME__AVERAGE: { "table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS 
pages", "formula": "AVG(NULLIF(load_event_time ,0))", "eventType": "LOCATION" }, - schemas.AlertColumn.performance__dom_build_time__average: { + schemas.AlertColumn.PERFORMANCE__DOM_BUILD_TIME__AVERAGE: { "table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS pages", "formula": "AVG(NULLIF(dom_building_time,0))", "eventType": "LOCATION" }, - schemas.AlertColumn.performance__speed_index__average: { + schemas.AlertColumn.PERFORMANCE__SPEED_INDEX__AVERAGE: { "table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS pages", "formula": "AVG(NULLIF(speed_index,0))", "eventType": "LOCATION" }, - schemas.AlertColumn.performance__page_response_time__average: { + schemas.AlertColumn.PERFORMANCE__PAGE_RESPONSE_TIME__AVERAGE: { "table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS pages", "formula": "AVG(NULLIF(response_time,0))", "eventType": "LOCATION" }, - schemas.AlertColumn.performance__ttfb__average: { + schemas.AlertColumn.PERFORMANCE__TTFB__AVERAGE: { "table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS pages", "formula": "AVG(NULLIF(first_contentful_paint_time,0))", "eventType": "LOCATION" }, - schemas.AlertColumn.performance__time_to_render__average: { + schemas.AlertColumn.PERFORMANCE__TIME_TO_RENDER__AVERAGE: { "table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS pages", "formula": "AVG(NULLIF(visually_complete,0))", "eventType": "LOCATION" }, - schemas.AlertColumn.performance__image_load_time__average: { + schemas.AlertColumn.PERFORMANCE__IMAGE_LOAD_TIME__AVERAGE: { "table": lambda timestamp: f"{exp_ch_helper.get_main_resources_table(timestamp)} AS resources", "formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='img'" }, - schemas.AlertColumn.performance__request_load_time__average: { + schemas.AlertColumn.PERFORMANCE__REQUEST_LOAD_TIME__AVERAGE: { "table": lambda timestamp: f"{exp_ch_helper.get_main_resources_table(timestamp)} AS resources", "formula": "AVG(NULLIF(resources.duration,0))", "condition": "type='fetch'" }, - schemas.AlertColumn.resources__load_time__average: { + schemas.AlertColumn.RESOURCES__LOAD_TIME__AVERAGE: { "table": lambda timestamp: f"{exp_ch_helper.get_main_resources_table(timestamp)} AS resources", "formula": "AVG(NULLIF(resources.duration,0))" }, - schemas.AlertColumn.resources__missing__count: { + schemas.AlertColumn.RESOURCES__MISSING__COUNT: { "table": lambda timestamp: f"{exp_ch_helper.get_main_resources_table(timestamp)} AS resources", "formula": "COUNT(DISTINCT url_hostpath)", "condition": "success= FALSE AND type='img'" }, - schemas.AlertColumn.errors__4xx_5xx__count: { + schemas.AlertColumn.ERRORS__4XX_5XX__COUNT: { "table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS requests", "eventType": "REQUEST", "formula": "COUNT(1)", "condition": "intDiv(requests.status, 100)!=2" }, - schemas.AlertColumn.errors__4xx__count: { + schemas.AlertColumn.ERRORS__4XX__COUNT: { "table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS requests", "eventType": "REQUEST", "formula": "COUNT(1)", "condition": "intDiv(requests.status, 100)==4" }, - schemas.AlertColumn.errors__5xx__count: { + schemas.AlertColumn.ERRORS__5XX__COUNT: { "table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS requests", "eventType": "REQUEST", "formula": "COUNT(1)", "condition": "intDiv(requests.status, 100)==5" }, - schemas.AlertColumn.errors__javascript__impacted_sessions__count: 
{ + schemas.AlertColumn.ERRORS__JAVASCRIPT__IMPACTED_SESSIONS__COUNT: { "table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS errors", "eventType": "ERROR", "formula": "COUNT(DISTINCT session_id)", "condition": "source='js_exception'" }, - schemas.AlertColumn.performance__crashes__count: { + schemas.AlertColumn.PERFORMANCE__CRASHES__COUNT: { "table": lambda timestamp: f"{exp_ch_helper.get_main_sessions_table(timestamp)} AS sessions", "formula": "COUNT(DISTINCT session_id)", "condition": "duration>0 AND errors_count>0" }, - schemas.AlertColumn.errors__javascript__count: { + schemas.AlertColumn.ERRORS__JAVASCRIPT__COUNT: { "table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS errors", "eventType": "ERROR", "formula": "COUNT(DISTINCT session_id)", "condition": "source='js_exception'" }, - schemas.AlertColumn.errors__backend__count: { + schemas.AlertColumn.ERRORS__BACKEND__COUNT: { "table": lambda timestamp: f"{exp_ch_helper.get_main_events_table(timestamp)} AS errors", "eventType": "ERROR", "formula": "COUNT(DISTINCT session_id)", @@ -122,7 +122,7 @@ def Build(a): full_args = {} if a["seriesId"] is not None: a["filter"]["sort"] = "session_id" - a["filter"]["order"] = schemas.SortOrderType.desc + a["filter"]["order"] = schemas.SortOrderType.DESC a["filter"]["startDate"] = 0 a["filter"]["endDate"] = TimeUTC.now() try: @@ -148,7 +148,7 @@ def Build(a): q = f"""SELECT coalesce(value,0) AS value, coalesce(value,0) {a["query"]["operator"]} {a["query"]["right"]} AS valid""" - if a["detectionMethod"] == schemas.AlertDetectionMethod.threshold: + if a["detectionMethod"] == schemas.AlertDetectionMethod.THRESHOLD: if a["seriesId"] is not None: q += f""" FROM ({subQ}) AS stat""" else: @@ -157,7 +157,7 @@ def Build(a): AND datetime<=toDateTime(%(now)s/1000) ) AS stat""" params = {**params, **full_args, "startDate": TimeUTC.now() - a["options"]["currentPeriod"] * 60 * 1000} else: - if a["change"] == schemas.AlertDetectionType.change: + if a["change"] == schemas.AlertDetectionType.CHANGE: if a["seriesId"] is not None: sub2 = subQ.replace("%(startDate)s", "%(timestamp_sub2)s").replace("%(endDate)s", "%(startDate)s") sub1 = f"SELECT (({subQ})-({sub2})) AS value" diff --git a/ee/api/chalicelib/core/assist_records.py b/ee/api/chalicelib/core/assist_records.py index 62f753e098..7b326786f4 100644 --- a/ee/api/chalicelib/core/assist_records.py +++ b/ee/api/chalicelib/core/assist_records.py @@ -60,7 +60,7 @@ def search_records(project_id: int, data: schemas.AssistRecordSearchPayloadSchem if data.query is not None and len(data.query) > 0: conditions.append("(users.name ILIKE %(query)s OR assist_records.name ILIKE %(query)s)") params["query"] = helper.values_for_operator(value=data.query, - op=schemas.SearchEventOperator._contains) + op=schemas.SearchEventOperator.CONTAINS) with pg_client.PostgresClient() as cur: query = cur.mogrify(f"""SELECT COUNT(assist_records.record_id) OVER () AS count, record_id, user_id, session_id, assist_records.created_at, diff --git a/ee/api/chalicelib/core/autocomplete_exp.py b/ee/api/chalicelib/core/autocomplete_exp.py index 6b3563283c..d76572fb97 100644 --- a/ee/api/chalicelib/core/autocomplete_exp.py +++ b/ee/api/chalicelib/core/autocomplete_exp.py @@ -8,23 +8,23 @@ def __get_autocomplete_table(value, project_id): - autocomplete_events = [schemas.FilterType.rev_id, - schemas.EventType.click, - schemas.FilterType.user_device, - schemas.FilterType.user_id, - schemas.FilterType.user_browser, - schemas.FilterType.user_os, - 
schemas.EventType.custom,
- schemas.FilterType.user_country,
- schemas.FilterType.user_city,
- schemas.FilterType.user_state,
- schemas.EventType.location,
- schemas.EventType.input]
+ autocomplete_events = [schemas.FilterType.REV_ID,
+ schemas.EventType.CLICK,
+ schemas.FilterType.USER_DEVICE,
+ schemas.FilterType.USER_ID,
+ schemas.FilterType.USER_BROWSER,
+ schemas.FilterType.USER_OS,
+ schemas.EventType.CUSTOM,
+ schemas.FilterType.USER_COUNTRY,
+ schemas.FilterType.USER_CITY,
+ schemas.FilterType.USER_STATE,
+ schemas.EventType.LOCATION,
+ schemas.EventType.INPUT]
 autocomplete_events.sort()
 sub_queries = []
 c_list = []
 for e in autocomplete_events:
- if e == schemas.FilterType.user_country:
+ if e == schemas.FilterType.USER_COUNTRY:
 c_list = countries.get_country_code_autocomplete(value)
 if len(c_list) > 0:
 sub_queries.append(f"""(SELECT DISTINCT ON(value) '{e.value}' AS _type, value
@@ -73,7 +73,7 @@ def __get_autocomplete_table(value, project_id):
 def __generic_query(typename, value_length=None):
- if typename == schemas.FilterType.user_country:
+ if typename == schemas.FilterType.USER_COUNTRY:
 return f"""SELECT DISTINCT value, type
 FROM {TABLE}
 WHERE
@@ -128,7 +128,7 @@ def f(project_id, text):
 params = {"project_id": project_id,
 "value": helper.string_to_sql_like(text),
 "svalue": helper.string_to_sql_like("^" + text)}
- if typename == schemas.FilterType.user_country:
+ if typename == schemas.FilterType.USER_COUNTRY:
 params["value"] = tuple(countries.get_country_code_autocomplete(text))
 if len(params["value"]) == 0:
 return []
diff --git a/ee/api/chalicelib/core/custom_metrics.py b/ee/api/chalicelib/core/custom_metrics.py
index 698df38ba8..eb95b5538a 100644
--- a/ee/api/chalicelib/core/custom_metrics.py
+++ b/ee/api/chalicelib/core/custom_metrics.py
@@ -159,14 +159,14 @@ def __get_table_of_urls(project_id: int, data: schemas.CardTable, user_id: int =
 def __get_table_chart(project_id: int, data: schemas.CardTable, user_id: int):
 supported = {
- schemas.MetricOfTable.sessions: __get_table_of_sessions,
- schemas.MetricOfTable.errors: __get_table_of_errors,
- schemas.MetricOfTable.user_id: __get_table_of_user_ids,
- schemas.MetricOfTable.issues: __get_table_of_issues,
+ schemas.MetricOfTable.SESSIONS: __get_table_of_sessions,
+ schemas.MetricOfTable.ERRORS: __get_table_of_errors,
+ schemas.MetricOfTable.USER_ID: __get_table_of_user_ids,
+ schemas.MetricOfTable.ISSUES: __get_table_of_issues,
- schemas.MetricOfTable.user_browser: __get_table_of_browsers,
+ schemas.MetricOfTable.USER_BROWSER: __get_table_of_browsers,
- schemas.MetricOfTable.user_device: __get_table_of_devises,
- schemas.MetricOfTable.user_country: __get_table_of_countries,
- schemas.MetricOfTable.visited_url: __get_table_of_urls,
+ schemas.MetricOfTable.USER_DEVICE: __get_table_of_devises,
+ schemas.MetricOfTable.USER_COUNTRY: __get_table_of_countries,
+ schemas.MetricOfTable.VISITED_URL: __get_table_of_urls,
 }
 return supported.get(data.metric_of, not_supported)(project_id=project_id, data=data, user_id=user_id)
@@ -178,12 +178,12 @@ def get_chart(project_id: int, data: schemas.CardSchema, user_id: int):
 data=data.model_dump())
 supported = {
- schemas.MetricType.timeseries: __get_timeseries_chart,
- schemas.MetricType.table: __get_table_chart,
- schemas.MetricType.heat_map: __get_heat_map_chart,
- schemas.MetricType.funnel: __get_funnel_chart,
- schemas.MetricType.insights: __get_insights_chart,
- schemas.MetricType.pathAnalysis: __get_path_analysis_chart
+ schemas.MetricType.TIMESERIES: __get_timeseries_chart,
+ schemas.MetricType.TABLE: __get_table_chart,
+ 
schemas.MetricType.HEAT_MAP: __get_heat_map_chart, + schemas.MetricType.FUNNEL: __get_funnel_chart, + schemas.MetricType.INSIGHTS: __get_insights_chart, + schemas.MetricType.PATH_ANALYSIS: __get_path_analysis_chart } return supported.get(data.metric_type, not_supported)(project_id=project_id, data=data, user_id=user_id) @@ -293,18 +293,18 @@ def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.Card filters=filters ) # ---- To make issues response close to the chart response - search_data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.events_count, - operator=schemas.MathOperator._greater, + search_data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT, + operator=schemas.MathOperator.GREATER, value=[1])) if len(data.start_point) == 0: - search_data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.location, - operator=schemas.SearchEventOperator._is_any, + search_data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION, + operator=schemas.SearchEventOperator.IS_ANY, value=[])) # ---- End for s in data.excludes: search_data.events.append(schemas.SessionSearchEventSchema2(type=s.type, - operator=schemas.SearchEventOperator._not_on, + operator=schemas.SearchEventOperator.NOT_ON, value=s.value)) result = sessions.search_table_of_individual_issues(project_id=project_id, data=search_data) return result @@ -313,15 +313,15 @@ def __get_path_analysis_issues(project_id: int, user_id: int, data: schemas.Card def get_issues(project_id: int, user_id: int, data: schemas.CardSchema): if data.is_predefined: return not_supported() - if data.metric_of == schemas.MetricOfTable.issues: + if data.metric_of == schemas.MetricOfTable.ISSUES: return __get_table_of_issues(project_id=project_id, user_id=user_id, data=data) supported = { - schemas.MetricType.timeseries: not_supported, - schemas.MetricType.table: not_supported, - schemas.MetricType.heat_map: not_supported, - schemas.MetricType.funnel: __get_funnel_issues, - schemas.MetricType.insights: not_supported, - schemas.MetricType.pathAnalysis: __get_path_analysis_issues, + schemas.MetricType.TIMESERIES: not_supported, + schemas.MetricType.TABLE: not_supported, + schemas.MetricType.HEAT_MAP: not_supported, + schemas.MetricType.FUNNEL: __get_funnel_issues, + schemas.MetricType.INSIGHTS: not_supported, + schemas.MetricType.PATH_ANALYSIS: __get_path_analysis_issues, } return supported.get(data.metric_type, not_supported)(project_id=project_id, data=data, user_id=user_id) @@ -337,7 +337,7 @@ def __get_path_analysis_card_info(data: schemas.CardPathAnalysis): def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False): with pg_client.PostgresClient() as cur: session_data = None - if data.metric_type == schemas.MetricType.heat_map: + if data.metric_type == schemas.MetricType.HEAT_MAP: if data.session_id is not None: session_data = {"sessionId": data.session_id} else: @@ -370,7 +370,7 @@ def create_card(project_id, user_id, data: schemas.CardSchema, dashboard=False): params = {"user_id": user_id, "project_id": project_id, **data.model_dump(), **_data} params["default_config"] = json.dumps(data.default_config.model_dump()) params["card_info"] = None - if data.metric_type == schemas.MetricType.pathAnalysis: + if data.metric_type == schemas.MetricType.PATH_ANALYSIS: params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data)) query = """INSERT INTO metrics (project_id, user_id, name, is_public, @@ -433,9 
+433,9 @@ def update_card(metric_id, user_id, project_id, data: schemas.CardSchema): params["d_series_ids"] = tuple(d_series_ids) params["card_info"] = None params["session_data"] = json.dumps(metric["data"]) - if data.metric_type == schemas.MetricType.pathAnalysis: + if data.metric_type == schemas.MetricType.PATH_ANALYSIS: params["card_info"] = json.dumps(__get_path_analysis_card_info(data=data)) - elif data.metric_type == schemas.MetricType.heat_map: + elif data.metric_type == schemas.MetricType.HEAT_MAP: if data.session_id is not None: params["session_data"] = json.dumps({"sessionId": data.session_id}) elif metric.get("data") and metric["data"].get("sessionId"): @@ -499,7 +499,7 @@ def search_all(project_id, user_id, data: schemas.SearchCardsSchema, include_ser if data.query is not None and len(data.query) > 0: constraints.append("(name ILIKE %(query)s OR owner.owner_email ILIKE %(query)s)") params["query"] = helper.values_for_operator(value=data.query, - op=schemas.SearchEventOperator._contains) + op=schemas.SearchEventOperator.CONTAINS) with pg_client.PostgresClient() as cur: sub_join = "" if include_series: @@ -641,7 +641,7 @@ def get_card(metric_id, project_id, user_id, flatten: bool = True, include_data: for s in row["series"]: s["filter"] = helper.old_search_payload_to_flat(s["filter"]) row = helper.dict_to_camel_case(row) - if row["metricType"] == schemas.MetricType.pathAnalysis: + if row["metricType"] == schemas.MetricType.PATH_ANALYSIS: row = __get_path_analysis_attributes(row=row) return row @@ -740,7 +740,7 @@ def make_chart_from_card(project_id, user_id, metric_id, data: schemas.CardSessi return custom_metrics_predefined.get_metric(key=metric.metric_of, project_id=project_id, data=data.model_dump()) - elif metric.metric_type == schemas.MetricType.heat_map: + elif metric.metric_type == schemas.MetricType.HEAT_MAP: if raw_metric["data"] and raw_metric["data"].get("sessionId"): return heatmaps.get_selected_session(project_id=project_id, session_id=raw_metric["data"]["sessionId"]) diff --git a/ee/api/chalicelib/core/errors.py b/ee/api/chalicelib/core/errors.py index ec49623455..7503488867 100644 --- a/ee/api/chalicelib/core/errors.py +++ b/ee/api/chalicelib/core/errors.py @@ -427,18 +427,18 @@ def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_n if chart: ch_sub_query += [f"timestamp >= generated_timestamp", f"timestamp < generated_timestamp + %({step_size_name})s"] - if platform == schemas.PlatformType.mobile: + if platform == schemas.PlatformType.MOBILE: ch_sub_query.append("user_device_type = 'mobile'") - elif platform == schemas.PlatformType.desktop: + elif platform == schemas.PlatformType.DESKTOP: ch_sub_query.append("user_device_type = 'desktop'") return ch_sub_query def __get_sort_key(key): return { - schemas.ErrorSort.occurrence: "max_datetime", - schemas.ErrorSort.users_count: "users", - schemas.ErrorSort.sessions_count: "sessions" + schemas.ErrorSort.OCCURRENCE: "max_datetime", + schemas.ErrorSort.USERS_COUNT: "users", + schemas.ErrorSort.SESSIONS_COUNT: "sessions" }.get(key, 'max_datetime') @@ -450,7 +450,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): platform = None for f in data.filters: - if f.type == schemas.FilterType.platform and len(f.value) > 0: + if f.type == schemas.FilterType.PLATFORM and len(f.value) > 0: platform = f.value[0] pg_sub_query = __get_basic_constraints(platform, project_key="sessions.project_id") pg_sub_query += ["sessions.start_ts>=%(startDate)s", "sessions.start_ts<%(endDate)s", "source 
='js_exception'", @@ -479,7 +479,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): sort = __get_sort_key('datetime') if data.sort is not None: sort = __get_sort_key(data.sort) - order = schemas.SortOrderType.desc + order = schemas.SortOrderType.DESC if data.order is not None: order = data.order extra_join = "" @@ -490,7 +490,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): "project_id": project_id, "userId": user_id, "step_size": step_size} - if data.status != schemas.ErrorStatus.all: + if data.status != schemas.ErrorStatus.ALL: pg_sub_query.append("status = %(error_status)s") params["error_status"] = data.status if data.limit is not None and data.page is not None: @@ -509,7 +509,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): if data.query is not None and len(data.query) > 0: pg_sub_query.append("(pe.name ILIKE %(error_query)s OR pe.message ILIKE %(error_query)s)") params["error_query"] = helper.values_for_operator(value=data.query, - op=schemas.SearchEventOperator._contains) + op=schemas.SearchEventOperator.CONTAINS) main_pg_query = f"""SELECT full_count, error_id, diff --git a/ee/api/chalicelib/core/errors_exp.py b/ee/api/chalicelib/core/errors_exp.py index c5e6432fb3..f36edc2e6f 100644 --- a/ee/api/chalicelib/core/errors_exp.py +++ b/ee/api/chalicelib/core/errors_exp.py @@ -20,31 +20,31 @@ def _multiple_values(values, value_key="value"): def __get_sql_operator(op: schemas.SearchEventOperator): return { - schemas.SearchEventOperator._is: "=", - schemas.SearchEventOperator._is_any: "IN", - schemas.SearchEventOperator._on: "=", - schemas.SearchEventOperator._on_any: "IN", - schemas.SearchEventOperator._is_not: "!=", - schemas.SearchEventOperator._not_on: "!=", - schemas.SearchEventOperator._contains: "ILIKE", - schemas.SearchEventOperator._not_contains: "NOT ILIKE", - schemas.SearchEventOperator._starts_with: "ILIKE", - schemas.SearchEventOperator._ends_with: "ILIKE", + schemas.SearchEventOperator.IS: "=", + schemas.SearchEventOperator.IS_ANY: "IN", + schemas.SearchEventOperator.ON: "=", + schemas.SearchEventOperator.ON_ANY: "IN", + schemas.SearchEventOperator.IS_NOT: "!=", + schemas.SearchEventOperator.NOT_ON: "!=", + schemas.SearchEventOperator.CONTAINS: "ILIKE", + schemas.SearchEventOperator.NOT_CONTAINS: "NOT ILIKE", + schemas.SearchEventOperator.STARTS_WITH: "ILIKE", + schemas.SearchEventOperator.ENDS_WITH: "ILIKE", }.get(op, "=") def _isAny_opreator(op: schemas.SearchEventOperator): - return op in [schemas.SearchEventOperator._on_any, schemas.SearchEventOperator._is_any] + return op in [schemas.SearchEventOperator.ON_ANY, schemas.SearchEventOperator.IS_ANY] def _isUndefined_operator(op: schemas.SearchEventOperator): - return op in [schemas.SearchEventOperator._is_undefined] + return op in [schemas.SearchEventOperator.IS_UNDEFINED] def __is_negation_operator(op: schemas.SearchEventOperator): - return op in [schemas.SearchEventOperator._is_not, - schemas.SearchEventOperator._not_on, - schemas.SearchEventOperator._not_contains] + return op in [schemas.SearchEventOperator.IS_NOT, + schemas.SearchEventOperator.NOT_ON, + schemas.SearchEventOperator.NOT_CONTAINS] def _multiple_conditions(condition, values, value_key="value", is_not=False): @@ -501,9 +501,9 @@ def __get_basic_constraints(platform=None, time_constraint=True, startTime_arg_n if time_constraint: ch_sub_query += [f"{table_name}datetime >= toDateTime(%({startTime_arg_name})s/1000)", f"{table_name}datetime < toDateTime(%({endTime_arg_name})s/1000)"] - if 
platform == schemas.PlatformType.mobile: + if platform == schemas.PlatformType.MOBILE: ch_sub_query.append("user_device_type = 'mobile'") - elif platform == schemas.PlatformType.desktop: + elif platform == schemas.PlatformType.DESKTOP: ch_sub_query.append("user_device_type = 'desktop'") return ch_sub_query @@ -515,9 +515,9 @@ def __get_step_size(startTimestamp, endTimestamp, density): def __get_sort_key(key): return { - schemas.ErrorSort.occurrence: "max_datetime", - schemas.ErrorSort.users_count: "users", - schemas.ErrorSort.sessions_count: "sessions" + schemas.ErrorSort.OCCURRENCE: "max_datetime", + schemas.ErrorSort.USERS_COUNT: "users", + schemas.ErrorSort.SESSIONS_COUNT: "sessions" }.get(key, 'max_datetime') @@ -534,9 +534,9 @@ def __get_basic_constraints_pg(platform=None, time_constraint=True, startTime_ar if chart: ch_sub_query += [f"timestamp >= generated_timestamp", f"timestamp < generated_timestamp + %({step_size_name})s"] - if platform == schemas.PlatformType.mobile: + if platform == schemas.PlatformType.MOBILE: ch_sub_query.append("user_device_type = 'mobile'") - elif platform == schemas.PlatformType.desktop: + elif platform == schemas.PlatformType.DESKTOP: ch_sub_query.append("user_device_type = 'desktop'") return ch_sub_query @@ -547,7 +547,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): platform = None for f in data.filters: - if f.type == schemas.FilterType.platform and len(f.value) > 0: + if f.type == schemas.FilterType.PLATFORM and len(f.value) > 0: platform = f.value[0] ch_sessions_sub_query = __get_basic_constraints(platform, type_condition=False) # ignore platform for errors table @@ -567,7 +567,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): if len(data.events) > 0: errors_condition_count = 0 for i, e in enumerate(data.events): - if e.type == schemas.EventType.error: + if e.type == schemas.EventType.ERROR: errors_condition_count += 1 is_any = _isAny_opreator(e.operator) op = __get_sql_operator(e.operator) @@ -596,7 +596,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): f_k = f"f_value{i}" params = {**params, f_k: f.value, **_multiple_values(f.value, value_key=f_k)} op = __get_sql_operator(f.operator) \ - if filter_type not in [schemas.FilterType.events_count] else f.operator + if filter_type not in [schemas.FilterType.EVENTS_COUNT] else f.operator is_any = _isAny_opreator(f.operator) is_undefined = _isUndefined_operator(f.operator) if not is_any and not is_undefined and len(f.value) == 0: @@ -604,7 +604,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): is_not = False if __is_negation_operator(f.operator): is_not = True - if filter_type == schemas.FilterType.user_browser: + if filter_type == schemas.FilterType.USER_BROWSER: if is_any: ch_sessions_sub_query.append('isNotNull(s.user_browser)') else: @@ -612,14 +612,14 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): _multiple_conditions(f's.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_mobile]: + elif filter_type in [schemas.FilterType.USER_OS, schemas.FilterType.USER_OS_MOBILE]: if is_any: ch_sessions_sub_query.append('isNotNull(s.user_os)') else: ch_sessions_sub_query.append( _multiple_conditions(f's.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_mobile]: + elif filter_type in 
[schemas.FilterType.USER_DEVICE, schemas.FilterType.USER_DEVICE_MOBILE]: if is_any: ch_sessions_sub_query.append('isNotNull(s.user_device)') else: @@ -627,7 +627,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): _multiple_conditions(f's.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_mobile]: + elif filter_type in [schemas.FilterType.USER_COUNTRY, schemas.FilterType.USER_COUNTRY_MOBILE]: if is_any: ch_sessions_sub_query.append('isNotNull(s.user_country)') else: @@ -636,7 +636,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): value_key=f_k)) - elif filter_type in [schemas.FilterType.utm_source]: + elif filter_type in [schemas.FilterType.UTM_SOURCE]: if is_any: ch_sessions_sub_query.append('isNotNull(s.utm_source)') elif is_undefined: @@ -646,7 +646,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): _multiple_conditions(f's.utm_source {op} toString(%({f_k})s)', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.utm_medium]: + elif filter_type in [schemas.FilterType.UTM_MEDIUM]: if is_any: ch_sessions_sub_query.append('isNotNull(s.utm_medium)') elif is_undefined: @@ -655,7 +655,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): ch_sessions_sub_query.append( _multiple_conditions(f's.utm_medium {op} toString(%({f_k})s)', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.utm_campaign]: + elif filter_type in [schemas.FilterType.UTM_CAMPAIGN]: if is_any: ch_sessions_sub_query.append('isNotNull(s.utm_campaign)') elif is_undefined: @@ -665,7 +665,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): _multiple_conditions(f's.utm_campaign {op} toString(%({f_k})s)', f.value, is_not=is_not, value_key=f_k)) - elif filter_type == schemas.FilterType.duration: + elif filter_type == schemas.FilterType.DURATION: if len(f.value) > 0 and f.value[0] is not None: ch_sessions_sub_query.append("s.duration >= %(minDuration)s") params["minDuration"] = f.value[0] @@ -673,14 +673,14 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): ch_sessions_sub_query.append("s.duration <= %(maxDuration)s") params["maxDuration"] = f.value[1] - elif filter_type == schemas.FilterType.referrer: + elif filter_type == schemas.FilterType.REFERRER: # extra_from += f"INNER JOIN {events.EventType.LOCATION.table} AS p USING(session_id)" if is_any: referrer_constraint = 'isNotNull(s.base_referrer)' else: referrer_constraint = _multiple_conditions(f"s.base_referrer {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k) - elif filter_type == schemas.FilterType.metadata: + elif filter_type == schemas.FilterType.METADATA: # get metadata list only if you need it if meta_keys is None: meta_keys = metadata.get(project_id=project_id) @@ -696,7 +696,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): f"s.{metadata.index_to_colname(meta_keys[f.source])} {op} toString(%({f_k})s)", f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_mobile]: + elif filter_type in [schemas.FilterType.USER_ID, schemas.FilterType.USER_ID_MOBILE]: if is_any: ch_sessions_sub_query.append('isNotNull(s.user_id)') elif is_undefined: @@ -705,8 +705,8 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): ch_sessions_sub_query.append( _multiple_conditions(f"s.user_id {op} 
toString(%({f_k})s)", f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_anonymous_id, - schemas.FilterType.user_anonymous_id_mobile]: + elif filter_type in [schemas.FilterType.USER_ANONYMOUS_ID, + schemas.FilterType.USER_ANONYMOUS_ID_MOBILE]: if is_any: ch_sessions_sub_query.append('isNotNull(s.user_anonymous_id)') elif is_undefined: @@ -717,7 +717,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_mobile]: + elif filter_type in [schemas.FilterType.REV_ID, schemas.FilterType.REV_ID_MOBILE]: if is_any: ch_sessions_sub_query.append('isNotNull(s.rev_id)') elif is_undefined: @@ -727,7 +727,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): _multiple_conditions(f"s.rev_id {op} toString(%({f_k})s)", f.value, is_not=is_not, value_key=f_k)) - elif filter_type == schemas.FilterType.platform: + elif filter_type == schemas.FilterType.PLATFORM: # op = __get_sql_operator(f.operator) ch_sessions_sub_query.append( _multiple_conditions(f"s.user_device_type {op} %({f_k})s", f.value, is_not=is_not, @@ -743,7 +743,7 @@ def search(data: schemas.SearchErrorsSchema, project_id, user_id): # if is_not: # extra_constraints[-1] = f"not({extra_constraints[-1]})" # ss_constraints[-1] = f"not({ss_constraints[-1]})" - elif filter_type == schemas.FilterType.events_count: + elif filter_type == schemas.FilterType.EVENTS_COUNT: ch_sessions_sub_query.append( _multiple_conditions(f"s.events_count {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k)) diff --git a/ee/api/chalicelib/core/events.py b/ee/api/chalicelib/core/events.py index 052aca6cfd..e533f7a908 100644 --- a/ee/api/chalicelib/core/events.py +++ b/ee/api/chalicelib/core/events.py @@ -61,7 +61,7 @@ def __get_grouped_clickrage(rows, session_id, project_id): def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: Optional[schemas.EventType] = None): with pg_client.PostgresClient() as cur: rows = [] - if event_type is None or event_type == schemas.EventType.click: + if event_type is None or event_type == schemas.EventType.CLICK: cur.execute(cur.mogrify("""\ SELECT c.*, @@ -75,7 +75,7 @@ def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: rows += cur.fetchall() if group_clickrage: rows = __get_grouped_clickrage(rows=rows, session_id=session_id, project_id=project_id) - if event_type is None or event_type == schemas.EventType.input: + if event_type is None or event_type == schemas.EventType.INPUT: cur.execute(cur.mogrify(""" SELECT i.*, @@ -87,7 +87,7 @@ def get_by_session_id(session_id, project_id, group_clickrage=False, event_type: {"project_id": project_id, "session_id": session_id}) ) rows += cur.fetchall() - if event_type is None or event_type == schemas.EventType.location: + if event_type is None or event_type == schemas.EventType.LOCATION: cur.execute(cur.mogrify("""\ SELECT l.*, @@ -121,26 +121,26 @@ def _search_tags(project_id, value, key=None, source=None): class EventType: - CLICK = Event(ui_type=schemas.EventType.click, table="events.clicks", column="label") - INPUT = Event(ui_type=schemas.EventType.input, table="events.inputs", column="label") - LOCATION = Event(ui_type=schemas.EventType.location, table="events.pages", column="path") - CUSTOM = Event(ui_type=schemas.EventType.custom, table="events_common.customs", column="name") - REQUEST = Event(ui_type=schemas.EventType.request, table="events_common.requests", 
column="path") - GRAPHQL = Event(ui_type=schemas.EventType.graphql, table="events.graphql", column="name") - STATEACTION = Event(ui_type=schemas.EventType.state_action, table="events.state_actions", column="name") - TAG = Event(ui_type=schemas.EventType.tag, table="events.tags", column="tag_id") - ERROR = Event(ui_type=schemas.EventType.error, table="events.errors", + CLICK = Event(ui_type=schemas.EventType.CLICK, table="events.clicks", column="label") + INPUT = Event(ui_type=schemas.EventType.INPUT, table="events.inputs", column="label") + LOCATION = Event(ui_type=schemas.EventType.LOCATION, table="events.pages", column="path") + CUSTOM = Event(ui_type=schemas.EventType.CUSTOM, table="events_common.customs", column="name") + REQUEST = Event(ui_type=schemas.EventType.REQUEST, table="events_common.requests", column="path") + GRAPHQL = Event(ui_type=schemas.EventType.GRAPHQL, table="events.graphql", column="name") + STATEACTION = Event(ui_type=schemas.EventType.STATE_ACTION, table="events.state_actions", column="name") + TAG = Event(ui_type=schemas.EventType.TAG, table="events.tags", column="tag_id") + ERROR = Event(ui_type=schemas.EventType.ERROR, table="events.errors", column=None) # column=None because errors are searched by name or message - METADATA = Event(ui_type=schemas.FilterType.metadata, table="public.sessions", column=None) + METADATA = Event(ui_type=schemas.FilterType.METADATA, table="public.sessions", column=None) # MOBILE - CLICK_MOBILE = Event(ui_type=schemas.EventType.click_mobile, table="events_ios.taps", column="label") - INPUT_MOBILE = Event(ui_type=schemas.EventType.input_mobile, table="events_ios.inputs", column="label") - VIEW_MOBILE = Event(ui_type=schemas.EventType.view_mobile, table="events_ios.views", column="name") - SWIPE_MOBILE = Event(ui_type=schemas.EventType.swipe_mobile, table="events_ios.swipes", column="label") - CUSTOM_MOBILE = Event(ui_type=schemas.EventType.custom_mobile, table="events_common.customs", column="name") - REQUEST_MOBILE = Event(ui_type=schemas.EventType.request_mobile, table="events_common.requests", column="path") - CRASH_MOBILE = Event(ui_type=schemas.EventType.error_mobile, table="events_common.crashes", - column=None) # column=None because errors are searched by name or message + CLICK_MOBILE = Event(ui_type=schemas.EventType.CLICK_MOBILE, table="events_ios.taps", column="label") + INPUT_MOBILE = Event(ui_type=schemas.EventType.INPUT_MOBILE, table="events_ios.inputs", column="label") + VIEW_MOBILE = Event(ui_type=schemas.EventType.VIEW_MOBILE, table="events_ios.views", column="name") + SWIPE_MOBILE = Event(ui_type=schemas.EventType.SWIPE_MOBILE, table="events_ios.swipes", column="label") + CUSTOM_MOBILE = Event(ui_type=schemas.EventType.CUSTOM_MOBILE, table="events_common.customs", column="name") + REQUEST_MOBILE = Event(ui_type=schemas.EventType.REQUEST_MOBILE, table="events_common.requests", column="path") + CRASH_MOBILE = Event(ui_type=schemas.EventType.ERROR_MOBILE, table="events_common.crashes", + column=None) # column=None because errors are searched by name or message SUPPORTED_TYPES = { diff --git a/ee/api/chalicelib/core/heatmaps.py b/ee/api/chalicelib/core/heatmaps.py index 2ce9a671ab..357a7a9be7 100644 --- a/ee/api/chalicelib/core/heatmaps.py +++ b/ee/api/chalicelib/core/heatmaps.py @@ -165,27 +165,27 @@ def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_i no_platform = True no_location = True for f in data.filters: - if f.type == schemas.FilterType.platform: + if f.type == 
schemas.FilterType.PLATFORM: no_platform = False break for f in data.events: - if f.type == schemas.EventType.location: + if f.type == schemas.EventType.LOCATION: no_location = False if len(f.value) == 0: - f.operator = schemas.SearchEventOperator._is_any + f.operator = schemas.SearchEventOperator.IS_ANY break if no_platform: - data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.platform, - value=[schemas.PlatformType.desktop], - operator=schemas.SearchEventOperator._is)) + data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.PLATFORM, + value=[schemas.PlatformType.DESKTOP], + operator=schemas.SearchEventOperator.IS)) if no_location: - data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.location, + data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION, value=[], - operator=schemas.SearchEventOperator._is_any)) + operator=schemas.SearchEventOperator.IS_ANY)) - data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.events_count, + data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT, value=[0], - operator=schemas.MathOperator._greater)) + operator=schemas.MathOperator.GREATER)) full_args, query_part = sessions.search_query_parts(data=data, error_status=None, errors_only=False, favorite_only=data.bookmarked, issue=None, @@ -194,7 +194,7 @@ def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_i if len(exclude_sessions) > 0: query_part += "\n AND session_id NOT IN %(exclude_sessions)s" with pg_client.PostgresClient() as cur: - data.order = schemas.SortOrderType.desc + data.order = schemas.SortOrderType.DESC data.sort = 'duration' main_query = cur.mogrify(f"""SELECT * FROM (SELECT {SESSION_PROJECTION_COLS} @@ -295,27 +295,27 @@ def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_i no_platform = True no_location = True for f in data.filters: - if f.type == schemas.FilterType.platform: + if f.type == schemas.FilterType.PLATFORM: no_platform = False break for f in data.events: - if f.type == schemas.EventType.location: + if f.type == schemas.EventType.LOCATION: no_location = False if len(f.value) == 0: - f.operator = schemas.SearchEventOperator._is_any + f.operator = schemas.SearchEventOperator.IS_ANY break if no_platform: - data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.platform, - value=[schemas.PlatformType.desktop], - operator=schemas.SearchEventOperator._is)) + data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.PLATFORM, + value=[schemas.PlatformType.DESKTOP], + operator=schemas.SearchEventOperator.IS)) if no_location: - data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.location, + data.events.append(schemas.SessionSearchEventSchema2(type=schemas.EventType.LOCATION, value=[], - operator=schemas.SearchEventOperator._is_any)) + operator=schemas.SearchEventOperator.IS_ANY)) - data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.events_count, + data.filters.append(schemas.SessionSearchFilterSchema(type=schemas.FilterType.EVENTS_COUNT, value=[0], - operator=schemas.MathOperator._greater)) + operator=schemas.MathOperator.GREATER)) full_args, query_part = sessions.search_query_parts_ch(data=data, error_status=None, errors_only=False, favorite_only=data.bookmarked, issue=None, @@ -324,7 +324,7 @@ def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_i if 
len(exclude_sessions) > 0: query_part += "\n AND session_id NOT IN (%(exclude_sessions)s)" with ch_client.ClickHouseClient() as cur: - data.order = schemas.SortOrderType.desc + data.order = schemas.SortOrderType.DESC data.sort = 'duration' main_query = cur.format(f"""SELECT * FROM (SELECT {SESSION_PROJECTION_COLS} @@ -360,7 +360,7 @@ def search_short_session(data: schemas.HeatMapSessionsSearch, project_id, user_i logger.info("couldn't find an existing replay after 3 iterations for heatmap") session['events'] = events.get_by_session_id(project_id=project_id, session_id=session["session_id"], - event_type=schemas.EventType.location) + event_type=schemas.EventType.LOCATION) else: return None diff --git a/ee/api/chalicelib/core/integrations_global.py b/ee/api/chalicelib/core/integrations_global.py index e601a94cc2..e0f5bba4a7 100644 --- a/ee/api/chalicelib/core/integrations_global.py +++ b/ee/api/chalicelib/core/integrations_global.py @@ -9,52 +9,52 @@ def get_global_integrations_status(tenant_id, user_id, project_id): SELECT EXISTS((SELECT 1 FROM public.oauth_authentication WHERE user_id = %(user_id)s - AND provider = 'github')) AS {schemas.IntegrationType.github.value}, + AND provider = 'github')) AS {schemas.IntegrationType.GITHUB.value}, EXISTS((SELECT 1 FROM public.jira_cloud - WHERE user_id = %(user_id)s)) AS {schemas.IntegrationType.jira.value}, + WHERE user_id = %(user_id)s)) AS {schemas.IntegrationType.JIRA.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='bugsnag')) AS {schemas.IntegrationType.bugsnag.value}, + AND provider='bugsnag')) AS {schemas.IntegrationType.BUGSNAG.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='cloudwatch')) AS {schemas.IntegrationType.cloudwatch.value}, + AND provider='cloudwatch')) AS {schemas.IntegrationType.CLOUDWATCH.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='datadog')) AS {schemas.IntegrationType.datadog.value}, + AND provider='datadog')) AS {schemas.IntegrationType.DATADOG.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='newrelic')) AS {schemas.IntegrationType.newrelic.value}, + AND provider='newrelic')) AS {schemas.IntegrationType.NEWRELIC.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='rollbar')) AS {schemas.IntegrationType.rollbar.value}, + AND provider='rollbar')) AS {schemas.IntegrationType.ROLLBAR.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='sentry')) AS {schemas.IntegrationType.sentry.value}, + AND provider='sentry')) AS {schemas.IntegrationType.SENTRY.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='stackdriver')) AS {schemas.IntegrationType.stackdriver.value}, + AND provider='stackdriver')) AS {schemas.IntegrationType.STACKDRIVER.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='sumologic')) AS {schemas.IntegrationType.sumologic.value}, + AND provider='sumologic')) AS {schemas.IntegrationType.SUMOLOGIC.value}, EXISTS((SELECT 1 FROM public.integrations WHERE project_id=%(project_id)s - AND provider='elasticsearch')) AS {schemas.IntegrationType.elasticsearch.value}, + AND provider='elasticsearch')) AS {schemas.IntegrationType.ELASTICSEARCH.value}, EXISTS((SELECT 1 FROM public.webhooks - WHERE type='slack' AND tenant_id=%(tenant_id)s AND deleted_at 
ISNULL)) AS {schemas.IntegrationType.slack.value}, + WHERE type='slack' AND tenant_id=%(tenant_id)s AND deleted_at ISNULL)) AS {schemas.IntegrationType.SLACK.value}, EXISTS((SELECT 1 FROM public.webhooks - WHERE type='msteams' AND tenant_id=%(tenant_id)s AND deleted_at ISNULL)) AS {schemas.IntegrationType.ms_teams.value};""", + WHERE type='msteams' AND tenant_id=%(tenant_id)s AND deleted_at ISNULL)) AS {schemas.IntegrationType.MS_TEAMS.value};""", {"user_id": user_id, "tenant_id": tenant_id, "project_id": project_id}) ) current_integrations = cur.fetchone() diff --git a/ee/api/chalicelib/core/metrics.py b/ee/api/chalicelib/core/metrics.py index 9d6ec289da..3d27a6081a 100644 --- a/ee/api/chalicelib/core/metrics.py +++ b/ee/api/chalicelib/core/metrics.py @@ -201,7 +201,7 @@ def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1) count = count[0]["count"] results["progress"] = helper.__progress(old_val=count, new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.count + results["unit"] = schemas.TemplatePredefinedUnits.COUNT return results @@ -1067,7 +1067,7 @@ def get_speed_index_location(project_id, startTimestamp=TimeUTC.now(delta_days=- FROM {exp_ch_helper.get_main_events_table(startTimestamp)} AS pages WHERE {" AND ".join(ch_sub_query)};""" avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0 - return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.millisecond} + return {"value": avg, "chart": helper.list_to_camel_case(rows), "unit": schemas.TemplatePredefinedUnits.MILLISECOND} def get_pages_response_time(project_id, startTimestamp=TimeUTC.now(delta_days=-1), @@ -1147,7 +1147,7 @@ def get_pages_response_time_distribution(project_id, startTimestamp=TimeUTC.now( quantiles[0]["values"][i]) else 0)} for i, v in enumerate(quantiles_keys) ], "extremeValues": [{"count": 0}], - "unit": schemas.TemplatePredefinedUnits.millisecond + "unit": schemas.TemplatePredefinedUnits.MILLISECOND } if len(rows) > 0: rows = helper.list_to_camel_case(rows) @@ -1376,7 +1376,7 @@ def get_memory_consumption(project_id, startTimestamp=TimeUTC.now(delta_days=-1) end_time=endTimestamp, density=density, neutral={"value": 0})), - "unit": schemas.TemplatePredefinedUnits.memory} + "unit": schemas.TemplatePredefinedUnits.MEMORY} def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1), @@ -1409,7 +1409,7 @@ def get_avg_cpu(project_id, startTimestamp=TimeUTC.now(delta_days=-1), end_time=endTimestamp, density=density, neutral={"value": 0})), - "unit": schemas.TemplatePredefinedUnits.percentage} + "unit": schemas.TemplatePredefinedUnits.PERCENTAGE} def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1), @@ -1442,7 +1442,7 @@ def get_avg_fps(project_id, startTimestamp=TimeUTC.now(delta_days=-1), end_time=endTimestamp, density=density, neutral={"value": 0})), - "unit": schemas.TemplatePredefinedUnits.frame} + "unit": schemas.TemplatePredefinedUnits.FRAME} def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1), @@ -1511,7 +1511,7 @@ def get_crashes(project_id, startTimestamp=TimeUTC.now(delta_days=-1), density=density, neutral={"value": 0}), "browsers": browsers, - "unit": schemas.TemplatePredefinedUnits.count} + "unit": schemas.TemplatePredefinedUnits.COUNT} return result @@ -1652,7 +1652,7 @@ def get_slowest_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1), FROM {exp_ch_helper.get_main_resources_table(startTimestamp)} AS resources WHERE {" AND 
".join(ch_sub_query)};""" avg = ch.execute(query=ch_query, params=params)[0]["avg"] if len(rows) > 0 else 0 - return {"value": avg, "chart": rows, "unit": schemas.TemplatePredefinedUnits.millisecond} + return {"value": avg, "chart": rows, "unit": schemas.TemplatePredefinedUnits.MILLISECOND} def get_errors_per_domains(project_id, startTimestamp=TimeUTC.now(delta_days=-1), @@ -2432,7 +2432,7 @@ def get_user_activity_avg_visited_pages(project_id, startTimestamp=TimeUTC.now(d if len(rows) > 0: previous = helper.dict_to_camel_case(rows[0]) results["progress"] = helper.__progress(old_val=previous["value"], new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.count + results["unit"] = schemas.TemplatePredefinedUnits.COUNT return results @@ -2623,7 +2623,7 @@ def get_top_metrics_count_requests(project_id, startTimestamp=TimeUTC.now(delta_ end_time=endTimestamp, density=density, neutral={"value": 0}) result["chart"] = rows - result["unit"] = schemas.TemplatePredefinedUnits.count + result["unit"] = schemas.TemplatePredefinedUnits.COUNT return helper.dict_to_camel_case(result) @@ -2847,5 +2847,5 @@ def get_unique_users(project_id, startTimestamp=TimeUTC.now(delta_days=-1), count = count[0]["count"] results["progress"] = helper.__progress(old_val=count, new_val=results["value"]) - results["unit"] = schemas.TemplatePredefinedUnits.count + results["unit"] = schemas.TemplatePredefinedUnits.COUNT return results diff --git a/ee/api/chalicelib/core/product_analytics.py b/ee/api/chalicelib/core/product_analytics.py index 262b26e801..9e7aa23e57 100644 --- a/ee/api/chalicelib/core/product_analytics.py +++ b/ee/api/chalicelib/core/product_analytics.py @@ -72,10 +72,10 @@ def __transform_journey(rows, reverse_path=False): JOURNEY_TYPES = { - schemas.ProductAnalyticsSelectedEventType.location: {"eventType": "LOCATION", "column": "url_path"}, - schemas.ProductAnalyticsSelectedEventType.click: {"eventType": "CLICK", "column": "label"}, - schemas.ProductAnalyticsSelectedEventType.input: {"eventType": "INPUT", "column": "label"}, - schemas.ProductAnalyticsSelectedEventType.custom_event: {"eventType": "CUSTOM", "column": "name"} + schemas.ProductAnalyticsSelectedEventType.LOCATION: {"eventType": "LOCATION", "column": "url_path"}, + schemas.ProductAnalyticsSelectedEventType.CLICK: {"eventType": "CLICK", "column": "label"}, + schemas.ProductAnalyticsSelectedEventType.INPUT: {"eventType": "INPUT", "column": "label"}, + schemas.ProductAnalyticsSelectedEventType.CUSTOM_EVENT: {"eventType": "CUSTOM", "column": "name"} } @@ -92,9 +92,9 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis): start_points_conditions = [] step_0_conditions = [] if len(data.metric_value) == 0: - data.metric_value.append(schemas.ProductAnalyticsSelectedEventType.location) - sub_events.append({"column": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.location]["column"], - "eventType": schemas.ProductAnalyticsSelectedEventType.location.value}) + data.metric_value.append(schemas.ProductAnalyticsSelectedEventType.LOCATION) + sub_events.append({"column": JOURNEY_TYPES[schemas.ProductAnalyticsSelectedEventType.LOCATION]["column"], + "eventType": schemas.ProductAnalyticsSelectedEventType.LOCATION.value}) else: for v in data.metric_value: if JOURNEY_TYPES.get(v): @@ -161,49 +161,49 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis): continue # ---- meta-filters - if f.type == schemas.FilterType.user_browser: + if f.type == schemas.FilterType.USER_BROWSER: if is_any: 
sessions_conditions.append('isNotNull(user_browser)') else: sessions_conditions.append( sh.multi_conditions(f'user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif f.type in [schemas.FilterType.user_os]: + elif f.type in [schemas.FilterType.USER_OS]: if is_any: sessions_conditions.append('isNotNull(user_os)') else: sessions_conditions.append( sh.multi_conditions(f'user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif f.type in [schemas.FilterType.user_device]: + elif f.type in [schemas.FilterType.USER_DEVICE]: if is_any: sessions_conditions.append('isNotNull(user_device)') else: sessions_conditions.append( sh.multi_conditions(f'user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif f.type in [schemas.FilterType.user_country]: + elif f.type in [schemas.FilterType.USER_COUNTRY]: if is_any: sessions_conditions.append('isNotNull(user_country)') else: sessions_conditions.append( sh.multi_conditions(f'user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif f.type == schemas.FilterType.user_city: + elif f.type == schemas.FilterType.USER_CITY: if is_any: sessions_conditions.append('isNotNull(user_city)') else: sessions_conditions.append( sh.multi_conditions(f'user_city {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif f.type == schemas.FilterType.user_state: + elif f.type == schemas.FilterType.USER_STATE: if is_any: sessions_conditions.append('isNotNull(user_state)') else: sessions_conditions.append( sh.multi_conditions(f'user_state {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif f.type in [schemas.FilterType.utm_source]: + elif f.type in [schemas.FilterType.UTM_SOURCE]: if is_any: sessions_conditions.append('isNotNull(utm_source)') elif is_undefined: @@ -213,7 +213,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis): sh.multi_conditions(f'utm_source {op} toString(%({f_k})s)', f.value, is_not=is_not, value_key=f_k)) - elif f.type in [schemas.FilterType.utm_medium]: + elif f.type in [schemas.FilterType.UTM_MEDIUM]: if is_any: sessions_conditions.append('isNotNull(utm_medium)') elif is_undefined: @@ -223,7 +223,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis): sh.multi_conditions(f'utm_medium {op} toString(%({f_k})s)', f.value, is_not=is_not, value_key=f_k)) - elif f.type in [schemas.FilterType.utm_campaign]: + elif f.type in [schemas.FilterType.UTM_CAMPAIGN]: if is_any: sessions_conditions.append('isNotNull(utm_campaign)') elif is_undefined: @@ -233,14 +233,14 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis): sh.multi_conditions(f'utm_campaign {op} toString(%({f_k})s)', f.value, is_not=is_not, value_key=f_k)) - elif f.type == schemas.FilterType.duration: + elif f.type == schemas.FilterType.DURATION: if len(f.value) > 0 and f.value[0] is not None: sessions_conditions.append("duration >= %(minDuration)s") extra_values["minDuration"] = f.value[0] if len(f.value) > 1 and f.value[1] is not None and int(f.value[1]) > 0: sessions_conditions.append("duration <= %(maxDuration)s") extra_values["maxDuration"] = f.value[1] - elif f.type == schemas.FilterType.referrer: + elif f.type == schemas.FilterType.REFERRER: # extra_from += f"INNER JOIN {events.event_type.LOCATION.table} AS p USING(session_id)" if is_any: sessions_conditions.append('isNotNull(base_referrer)') @@ -248,7 +248,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis): sessions_conditions.append( sh.multi_conditions(f"base_referrer {op} %({f_k})s", 
f.value, is_not=is_not, value_key=f_k)) - elif f.type == schemas.FilterType.metadata: + elif f.type == schemas.FilterType.METADATA: # get metadata list only if you need it if meta_keys is None: meta_keys = metadata.get(project_id=project_id) @@ -264,7 +264,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis): f"{metadata.index_to_colname(meta_keys[f.source])} {op} toString(%({f_k})s)", f.value, is_not=is_not, value_key=f_k)) - elif f.type in [schemas.FilterType.user_id, schemas.FilterType.user_id_mobile]: + elif f.type in [schemas.FilterType.USER_ID, schemas.FilterType.USER_ID_MOBILE]: if is_any: sessions_conditions.append('isNotNull(user_id)') elif is_undefined: @@ -274,8 +274,8 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis): sh.multi_conditions(f"user_id {op} toString(%({f_k})s)", f.value, is_not=is_not, value_key=f_k)) - elif f.type in [schemas.FilterType.user_anonymous_id, - schemas.FilterType.user_anonymous_id_mobile]: + elif f.type in [schemas.FilterType.USER_ANONYMOUS_ID, + schemas.FilterType.USER_ANONYMOUS_ID_MOBILE]: if is_any: sessions_conditions.append('isNotNull(user_anonymous_id)') elif is_undefined: @@ -285,7 +285,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis): sh.multi_conditions(f"user_anonymous_id {op} toString(%({f_k})s)", f.value, is_not=is_not, value_key=f_k)) - elif f.type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_mobile]: + elif f.type in [schemas.FilterType.REV_ID, schemas.FilterType.REV_ID_MOBILE]: if is_any: sessions_conditions.append('isNotNull(rev_id)') elif is_undefined: @@ -294,13 +294,13 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis): sessions_conditions.append( sh.multi_conditions(f"rev_id {op} toString(%({f_k})s)", f.value, is_not=is_not, value_key=f_k)) - elif f.type == schemas.FilterType.platform: + elif f.type == schemas.FilterType.PLATFORM: # op = __ sh.get_sql_operator(f.operator) sessions_conditions.append( sh.multi_conditions(f"user_device_type {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k)) - elif f.type == schemas.FilterType.issue: + elif f.type == schemas.FilterType.ISSUE: if is_any: sessions_conditions.append("array_length(issue_types, 1) > 0") else: @@ -308,7 +308,7 @@ def path_analysis(project_id: int, data: schemas.CardPathAnalysis): sh.multi_conditions(f"has(issue_types,%({f_k})s)", f.value, is_not=is_not, value_key=f_k)) - elif f.type == schemas.FilterType.events_count: + elif f.type == schemas.FilterType.EVENTS_COUNT: sessions_conditions.append( sh.multi_conditions(f"events_count {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k)) diff --git a/ee/api/chalicelib/core/sessions_devtool.py b/ee/api/chalicelib/core/sessions_devtool.py index e0b7e5fae0..6958eda788 100644 --- a/ee/api/chalicelib/core/sessions_devtool.py +++ b/ee/api/chalicelib/core/sessions_devtool.py @@ -5,7 +5,7 @@ from chalicelib.core import permissions from chalicelib.utils.storage import StorageClient -SCOPES = SecurityScopes([schemas.Permissions.dev_tools]) +SCOPES = SecurityScopes([schemas.Permissions.DEV_TOOLS]) def __get_devtools_keys(project_id, session_id): diff --git a/ee/api/chalicelib/core/sessions_exp.py b/ee/api/chalicelib/core/sessions_exp.py index da9b3bdc80..76349efc87 100644 --- a/ee/api/chalicelib/core/sessions_exp.py +++ b/ee/api/chalicelib/core/sessions_exp.py @@ -58,23 +58,23 @@ def __get_sql_operator(op: schemas.SearchEventOperator): return { - schemas.SearchEventOperator._is: "=", - schemas.SearchEventOperator._is_any: "IN", - 
schemas.SearchEventOperator._on: "=", - schemas.SearchEventOperator._on_any: "IN", - schemas.SearchEventOperator._is_not: "!=", - schemas.SearchEventOperator._not_on: "!=", - schemas.SearchEventOperator._contains: "ILIKE", - schemas.SearchEventOperator._not_contains: "NOT ILIKE", - schemas.SearchEventOperator._starts_with: "ILIKE", - schemas.SearchEventOperator._ends_with: "ILIKE", + schemas.SearchEventOperator.IS: "=", + schemas.SearchEventOperator.IS_ANY: "IN", + schemas.SearchEventOperator.ON: "=", + schemas.SearchEventOperator.ON_ANY: "IN", + schemas.SearchEventOperator.IS_NOT: "!=", + schemas.SearchEventOperator.NOT_ON: "!=", + schemas.SearchEventOperator.CONTAINS: "ILIKE", + schemas.SearchEventOperator.NOT_CONTAINS: "NOT ILIKE", + schemas.SearchEventOperator.STARTS_WITH: "ILIKE", + schemas.SearchEventOperator.ENDS_WITH: "ILIKE", }.get(op, "=") def __is_negation_operator(op: schemas.SearchEventOperator): - return op in [schemas.SearchEventOperator._is_not, - schemas.SearchEventOperator._not_on, - schemas.SearchEventOperator._not_contains] + return op in [schemas.SearchEventOperator.IS_NOT, + schemas.SearchEventOperator.NOT_ON, + schemas.SearchEventOperator.NOT_CONTAINS] def __reverse_sql_operator(op): @@ -99,16 +99,16 @@ def _multiple_values(values, value_key="value"): def _isAny_opreator(op: schemas.SearchEventOperator): - return op in [schemas.SearchEventOperator._on_any, schemas.SearchEventOperator._is_any] + return op in [schemas.SearchEventOperator.ON_ANY, schemas.SearchEventOperator.IS_ANY] def _isUndefined_operator(op: schemas.SearchEventOperator): - return op in [schemas.SearchEventOperator._is_undefined] + return op in [schemas.SearchEventOperator.IS_UNDEFINED] # This function executes the query and return result def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_id, errors_only=False, - error_status=schemas.ErrorStatus.all, count_only=False, issue=None, ids_only=False, + error_status=schemas.ErrorStatus.ALL, count_only=False, issue=None, ids_only=False, platform="web"): if data.bookmarked: data.startTimestamp, data.endTimestamp = sessions_favorite.get_start_end_timestamp(project_id, user_id) @@ -143,12 +143,12 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ elif data.group_by_user: g_sort = "count(full_sessions)" if data.order is None: - data.order = schemas.SortOrderType.desc.value + data.order = schemas.SortOrderType.DESC.value else: data.order = data.order if data.sort is not None and data.sort != 'sessionsCount': sort = helper.key_to_snake_case(data.sort) - g_sort = f"{'MIN' if data.order == schemas.SortOrderType.desc else 'MAX'}({sort})" + g_sort = f"{'MIN' if data.order == schemas.SortOrderType.DESC else 'MAX'}({sort})" else: sort = 'start_ts' @@ -179,7 +179,7 @@ def search_sessions(data: schemas.SessionsSearchPayloadSchema, project_id, user_ full_args) else: if data.order is None: - data.order = schemas.SortOrderType.desc.value + data.order = schemas.SortOrderType.DESC.value else: data.order = data.order sort = 'session_id' @@ -251,25 +251,25 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d step_size = int(metrics_helper.__get_step_size(endTimestamp=data.endTimestamp, startTimestamp=data.startTimestamp, density=density)) extra_event = None - if metric_of == schemas.MetricOfTable.visited_url: + if metric_of == schemas.MetricOfTable.VISITED_URL: extra_event = f"""SELECT DISTINCT ev.session_id, ev.url_path FROM {exp_ch_helper.get_main_events_table(data.startTimestamp)} AS 
ev WHERE ev.datetime >= toDateTime(%(startDate)s / 1000) AND ev.datetime <= toDateTime(%(endDate)s / 1000) AND ev.project_id = %(project_id)s AND ev.event_type = 'LOCATION'""" - elif metric_of == schemas.MetricOfTable.issues and len(metric_value) > 0: - data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue, - operator=schemas.SearchEventOperator._is)) + elif metric_of == schemas.MetricOfTable.ISSUES and len(metric_value) > 0: + data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.ISSUE, + operator=schemas.SearchEventOperator.IS)) full_args, query_part = search_query_parts_ch(data=data, error_status=None, errors_only=False, favorite_only=False, issue=None, project_id=project_id, user_id=None, extra_event=extra_event) full_args["step_size"] = step_size sessions = [] with ch_client.ClickHouseClient() as cur: - if metric_type == schemas.MetricType.timeseries: - if view_type == schemas.MetricTimeseriesViewType.line_chart: - if metric_of == schemas.MetricOfTimeseries.session_count: + if metric_type == schemas.MetricType.TIMESERIES: + if view_type == schemas.MetricTimeseriesViewType.LINE_CHART: + if metric_of == schemas.MetricOfTimeseries.SESSION_COUNT: query = f"""SELECT toUnixTimestamp( toStartOfInterval(processed_sessions.datetime, INTERVAL %(step_size)s second) ) * 1000 AS timestamp, @@ -279,7 +279,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d {query_part}) AS processed_sessions GROUP BY timestamp ORDER BY timestamp;""" - elif metric_of == schemas.MetricOfTimeseries.user_count: + elif metric_of == schemas.MetricOfTimeseries.USER_COUNT: query = f"""SELECT toUnixTimestamp( toStartOfInterval(processed_sessions.datetime, INTERVAL %(step_size)s second) ) * 1000 AS timestamp, @@ -302,12 +302,12 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d logging.debug(main_query) logging.debug("--------------------") sessions = cur.execute(main_query) - if view_type == schemas.MetricTimeseriesViewType.line_chart: + if view_type == schemas.MetricTimeseriesViewType.LINE_CHART: sessions = metrics.__complete_missing_steps(start_time=data.startTimestamp, end_time=data.endTimestamp, density=density, neutral={"count": 0}, rows=sessions) else: sessions = sessions[0]["count"] if len(sessions) > 0 else 0 - elif metric_type == schemas.MetricType.table: + elif metric_type == schemas.MetricType.TABLE: full_args["limit_s"] = 0 full_args["limit_e"] = 200 if isinstance(metric_of, schemas.MetricOfTable): @@ -315,16 +315,16 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d extra_col = "s.user_id" extra_where = "" pre_query = "" - if metric_of == schemas.MetricOfTable.user_country: + if metric_of == schemas.MetricOfTable.USER_COUNTRY: main_col = "user_country" extra_col = "s.user_country" - elif metric_of == schemas.MetricOfTable.user_device: + elif metric_of == schemas.MetricOfTable.USER_DEVICE: main_col = "user_device" extra_col = "s.user_device" elif metric_of == schemas.MetricOfTable.user_browser: main_col = "user_browser" extra_col = "s.user_browser" - elif metric_of == schemas.MetricOfTable.issues: + elif metric_of == schemas.MetricOfTable.ISSUES: main_col = "issue" extra_col = f"arrayJoin(s.issue_types) AS {main_col}" if len(metric_value) > 0: @@ -334,7 +334,7 @@ def search2_series(data: schemas.SessionsSearchPayloadSchema, project_id: int, d extra_where.append(f"{main_col} = %({arg_name})s") full_args[arg_name] = 
metric_value[i] extra_where = f"WHERE ({' OR '.join(extra_where)})" - elif metric_of == schemas.MetricOfTable.visited_url: + elif metric_of == schemas.MetricOfTable.VISITED_URL: main_col = "url_path" extra_col = "s.url_path" main_query = cur.format(f"""{pre_query} @@ -373,7 +373,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de extra_event = None extra_deduplication = [] extra_conditions = None - if metric_of == schemas.MetricOfTable.visited_url: + if metric_of == schemas.MetricOfTable.VISITED_URL: extra_event = f"""SELECT DISTINCT ev.session_id, ev.url_path FROM {exp_ch_helper.get_main_events_table(data.startTimestamp)} AS ev WHERE ev.datetime >= toDateTime(%(startDate)s / 1000) @@ -383,7 +383,7 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de extra_deduplication.append("url_path") extra_conditions = {} for e in data.events: - if e.type == schemas.EventType.location: + if e.type == schemas.EventType.LOCATION: if e.operator not in extra_conditions: extra_conditions[e.operator] = schemas.SessionSearchEventSchema2.model_validate({ "type": e.type, @@ -397,9 +397,9 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de extra_conditions[e.operator].value.append(v) extra_conditions = list(extra_conditions.values()) - elif metric_of == schemas.MetricOfTable.issues and len(metric_value) > 0: - data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.issue, - operator=schemas.SearchEventOperator._is)) + elif metric_of == schemas.MetricOfTable.ISSUES and len(metric_value) > 0: + data.filters.append(schemas.SessionSearchFilterSchema(value=metric_value, type=schemas.FilterType.ISSUE, + operator=schemas.SearchEventOperator.IS)) full_args, query_part = search_query_parts_ch(data=data, error_status=None, errors_only=False, favorite_only=False, issue=None, project_id=project_id, user_id=None, extra_event=extra_event, @@ -416,16 +416,16 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de main_col = "user_id" extra_col = "s.user_id" extra_where = "" - if metric_of == schemas.MetricOfTable.user_country: + if metric_of == schemas.MetricOfTable.USER_COUNTRY: main_col = "user_country" extra_col = "s.user_country" - elif metric_of == schemas.MetricOfTable.user_device: + elif metric_of == schemas.MetricOfTable.USER_DEVICE: main_col = "user_device" extra_col = "s.user_device" elif metric_of == schemas.MetricOfTable.user_browser: main_col = "user_browser" extra_col = "s.user_browser" - elif metric_of == schemas.MetricOfTable.issues: + elif metric_of == schemas.MetricOfTable.ISSUES: main_col = "issue" extra_col = f"arrayJoin(s.issue_types) AS {main_col}" if len(metric_value) > 0: @@ -435,11 +435,11 @@ def search2_table(data: schemas.SessionsSearchPayloadSchema, project_id: int, de extra_where.append(f"{main_col} = %({arg_name})s") full_args[arg_name] = metric_value[i] extra_where = f"WHERE ({' OR '.join(extra_where)})" - elif metric_of == schemas.MetricOfTable.visited_url: + elif metric_of == schemas.MetricOfTable.VISITED_URL: main_col = "url_path" extra_col = "s.url_path" - if metric_format == schemas.MetricExtendedFormatType.session_count: + if metric_format == schemas.MetricExtendedFormatType.SESSION_COUNT: main_query = f"""SELECT COUNT(DISTINCT {main_col}) OVER () AS main_count, {main_col} AS name, count(DISTINCT session_id) AS session_count, @@ -528,44 +528,44 @@ def search_table_of_individual_issues(data: 
schemas.SessionsSearchPayloadSchema, def __is_valid_event(is_any: bool, event: schemas.SessionSearchEventSchema2): - return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.request_details, - schemas.EventType.graphql] \ - or event.type in [schemas.PerformanceEventType.location_dom_complete, - schemas.PerformanceEventType.location_largest_contentful_paint_time, - schemas.PerformanceEventType.location_ttfb, - schemas.PerformanceEventType.location_avg_cpu_load, - schemas.PerformanceEventType.location_avg_memory_usage + return not (not is_any and len(event.value) == 0 and event.type not in [schemas.EventType.REQUEST_DETAILS, + schemas.EventType.GRAPHQL] \ + or event.type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE, + schemas.PerformanceEventType.LOCATION_LARGEST_CONTENTFUL_PAINT_TIME, + schemas.PerformanceEventType.LOCATION_TTFB, + schemas.PerformanceEventType.LOCATION_AVG_CPU_LOAD, + schemas.PerformanceEventType.LOCATION_AVG_MEMORY_USAGE ] and (event.source is None or len(event.source) == 0) \ - or event.type in [schemas.EventType.request_details, schemas.EventType.graphql] and ( + or event.type in [schemas.EventType.REQUEST_DETAILS, schemas.EventType.GRAPHQL] and ( event.filters is None or len(event.filters) == 0)) def __get_event_type(event_type: Union[schemas.EventType, schemas.PerformanceEventType], platform="web"): defs = { - schemas.EventType.click: "CLICK", - schemas.EventType.input: "INPUT", - schemas.EventType.location: "LOCATION", - schemas.PerformanceEventType.location_dom_complete: "LOCATION", - schemas.PerformanceEventType.location_largest_contentful_paint_time: "LOCATION", - schemas.PerformanceEventType.location_ttfb: "LOCATION", - schemas.EventType.custom: "CUSTOM", - schemas.EventType.request: "REQUEST", - schemas.EventType.request_details: "REQUEST", - schemas.PerformanceEventType.fetch_failed: "REQUEST", - schemas.EventType.state_action: "STATEACTION", - schemas.EventType.error: "ERROR", - schemas.PerformanceEventType.location_avg_cpu_load: 'PERFORMANCE', - schemas.PerformanceEventType.location_avg_memory_usage: 'PERFORMANCE' + schemas.EventType.CLICK: "CLICK", + schemas.EventType.INPUT: "INPUT", + schemas.EventType.LOCATION: "LOCATION", + schemas.PerformanceEventType.LOCATION_DOM_COMPLETE: "LOCATION", + schemas.PerformanceEventType.LOCATION_LARGEST_CONTENTFUL_PAINT_TIME: "LOCATION", + schemas.PerformanceEventType.LOCATION_TTFB: "LOCATION", + schemas.EventType.CUSTOM: "CUSTOM", + schemas.EventType.REQUEST: "REQUEST", + schemas.EventType.REQUEST_DETAILS: "REQUEST", + schemas.PerformanceEventType.FETCH_FAILED: "REQUEST", + schemas.EventType.STATE_ACTION: "STATEACTION", + schemas.EventType.ERROR: "ERROR", + schemas.PerformanceEventType.LOCATION_AVG_CPU_LOAD: 'PERFORMANCE', + schemas.PerformanceEventType.LOCATION_AVG_MEMORY_USAGE: 'PERFORMANCE' } defs_mobile = { - schemas.EventType.click: "TAP", - schemas.EventType.input: "INPUT", - schemas.EventType.location: "VIEW", - schemas.EventType.custom: "CUSTOM", - schemas.EventType.request: "REQUEST", - schemas.EventType.request_details: "REQUEST", - schemas.PerformanceEventType.fetch_failed: "REQUEST", - schemas.EventType.error: "CRASH", + schemas.EventType.CLICK: "TAP", + schemas.EventType.INPUT: "INPUT", + schemas.EventType.LOCATION: "VIEW", + schemas.EventType.CUSTOM: "CUSTOM", + schemas.EventType.REQUEST: "REQUEST", + schemas.EventType.REQUEST_DETAILS: "REQUEST", + schemas.PerformanceEventType.FETCH_FAILED: "REQUEST", + schemas.EventType.ERROR: "CRASH", } if platform == "ios" and 
event_type in defs_mobile: return defs_mobile.get(event_type) @@ -581,7 +581,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu if issue: data.filters.append( schemas.SessionSearchFilterSchema(value=[issue['type']], - type=schemas.FilterType.issue.value, + type=schemas.FilterType.ISSUE.value, operator='is') ) ss_constraints = [] @@ -622,7 +622,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu f_k = f"f_value{i}" full_args = {**full_args, f_k: f.value, **_multiple_values(f.value, value_key=f_k)} op = __get_sql_operator(f.operator) \ - if filter_type not in [schemas.FilterType.events_count] else f.operator.value + if filter_type not in [schemas.FilterType.EVENTS_COUNT] else f.operator.value is_any = _isAny_opreator(f.operator) is_undefined = _isUndefined_operator(f.operator) if not is_any and not is_undefined and len(f.value) == 0: @@ -630,7 +630,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu is_not = False if __is_negation_operator(f.operator): is_not = True - if filter_type == schemas.FilterType.user_browser: + if filter_type == schemas.FilterType.USER_BROWSER: if is_any: extra_constraints.append('isNotNull(s.user_browser)') ss_constraints.append('isNotNull(ms.user_browser)') @@ -640,7 +640,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu ss_constraints.append( _multiple_conditions(f'ms.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_mobile]: + elif filter_type in [schemas.FilterType.USER_OS, schemas.FilterType.USER_OS_MOBILE]: if is_any: extra_constraints.append('isNotNull(s.user_os)') ss_constraints.append('isNotNull(ms.user_os)') @@ -650,7 +650,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu ss_constraints.append( _multiple_conditions(f'ms.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_mobile]: + elif filter_type in [schemas.FilterType.USER_DEVICE, schemas.FilterType.USER_DEVICE_MOBILE]: if is_any: extra_constraints.append('isNotNull(s.user_device)') ss_constraints.append('isNotNull(ms.user_device)') @@ -660,7 +660,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu ss_constraints.append( _multiple_conditions(f'ms.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_mobile]: + elif filter_type in [schemas.FilterType.USER_COUNTRY, schemas.FilterType.USER_COUNTRY_MOBILE]: if is_any: extra_constraints.append('isNotNull(s.user_country)') ss_constraints.append('isNotNull(ms.user_country)') @@ -670,7 +670,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu ss_constraints.append( _multiple_conditions(f'ms.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in schemas.FilterType.user_city: + elif filter_type in schemas.FilterType.USER_CITY: if is_any: extra_constraints.append('isNotNull(s.user_city)') ss_constraints.append('isNotNull(ms.user_city)') @@ -680,7 +680,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu ss_constraints.append( _multiple_conditions(f'ms.user_city {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in 
@@ -581,7 +581,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu if issue: data.filters.append( schemas.SessionSearchFilterSchema(value=[issue['type']], - type=schemas.FilterType.issue.value, + type=schemas.FilterType.ISSUE.value, operator='is') ) ss_constraints = []
@@ -622,7 +622,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu f_k = f"f_value{i}" full_args = {**full_args, f_k: f.value, **_multiple_values(f.value, value_key=f_k)} op = __get_sql_operator(f.operator) \ - if filter_type not in [schemas.FilterType.events_count] else f.operator.value + if filter_type not in [schemas.FilterType.EVENTS_COUNT] else f.operator.value is_any = _isAny_opreator(f.operator) is_undefined = _isUndefined_operator(f.operator) if not is_any and not is_undefined and len(f.value) == 0:
@@ -630,7 +630,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu is_not = False if __is_negation_operator(f.operator): is_not = True - if filter_type == schemas.FilterType.user_browser: + if filter_type == schemas.FilterType.USER_BROWSER: if is_any: extra_constraints.append('isNotNull(s.user_browser)') ss_constraints.append('isNotNull(ms.user_browser)')
@@ -640,7 +640,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu ss_constraints.append( _multiple_conditions(f'ms.user_browser {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_os, schemas.FilterType.user_os_mobile]: + elif filter_type in [schemas.FilterType.USER_OS, schemas.FilterType.USER_OS_MOBILE]: if is_any: extra_constraints.append('isNotNull(s.user_os)') ss_constraints.append('isNotNull(ms.user_os)')
@@ -650,7 +650,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu ss_constraints.append( _multiple_conditions(f'ms.user_os {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_device, schemas.FilterType.user_device_mobile]: + elif filter_type in [schemas.FilterType.USER_DEVICE, schemas.FilterType.USER_DEVICE_MOBILE]: if is_any: extra_constraints.append('isNotNull(s.user_device)') ss_constraints.append('isNotNull(ms.user_device)')
@@ -660,7 +660,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu ss_constraints.append( _multiple_conditions(f'ms.user_device {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_country, schemas.FilterType.user_country_mobile]: + elif filter_type in [schemas.FilterType.USER_COUNTRY, schemas.FilterType.USER_COUNTRY_MOBILE]: if is_any: extra_constraints.append('isNotNull(s.user_country)') ss_constraints.append('isNotNull(ms.user_country)')
@@ -670,7 +670,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu ss_constraints.append( _multiple_conditions(f'ms.user_country {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in schemas.FilterType.user_city: + elif filter_type in schemas.FilterType.USER_CITY: if is_any: extra_constraints.append('isNotNull(s.user_city)') ss_constraints.append('isNotNull(ms.user_city)')
@@ -680,7 +680,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu ss_constraints.append( _multiple_conditions(f'ms.user_city {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in schemas.FilterType.user_state: + elif filter_type in schemas.FilterType.USER_STATE: if is_any: extra_constraints.append('isNotNull(s.user_state)') ss_constraints.append('isNotNull(ms.user_state)')
@@ -690,7 +690,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu ss_constraints.append( _multiple_conditions(f'ms.user_state {op} %({f_k})s', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.utm_source]: + elif filter_type in [schemas.FilterType.UTM_SOURCE]: if is_any: extra_constraints.append('isNotNull(s.utm_source)') ss_constraints.append('isNotNull(ms.utm_source)')
@@ -704,7 +704,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu ss_constraints.append( _multiple_conditions(f'ms.utm_source {op} toString(%({f_k})s)', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.utm_medium]: + elif filter_type in [schemas.FilterType.UTM_MEDIUM]: if is_any: extra_constraints.append('isNotNull(s.utm_medium)') ss_constraints.append('isNotNull(ms.utm_medium)')
@@ -718,7 +718,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu ss_constraints.append( _multiple_conditions(f'ms.utm_medium {op} toString(%({f_k})s)', f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.utm_campaign]: + elif filter_type in [schemas.FilterType.UTM_CAMPAIGN]: if is_any: extra_constraints.append('isNotNull(s.utm_campaign)') ss_constraints.append('isNotNull(ms.utm_campaign)')
@@ -733,7 +733,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu _multiple_conditions(f'ms.utm_campaign {op} toString(%({f_k})s)', f.value, is_not=is_not, value_key=f_k)) - elif filter_type == schemas.FilterType.duration: + elif filter_type == schemas.FilterType.DURATION: if len(f.value) > 0 and f.value[0] is not None: extra_constraints.append("s.duration >= %(minDuration)s") ss_constraints.append("ms.duration >= %(minDuration)s")
@@ -742,7 +742,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu extra_constraints.append("s.duration <= %(maxDuration)s") ss_constraints.append("ms.duration <= %(maxDuration)s") full_args["maxDuration"] = f.value[1] - elif filter_type == schemas.FilterType.referrer: + elif filter_type == schemas.FilterType.REFERRER: if is_any: extra_constraints.append('isNotNull(s.base_referrer)') ss_constraints.append('isNotNull(ms.base_referrer)')
@@ -774,7 +774,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu _multiple_conditions( f"ms.{metadata.index_to_colname(meta_keys[f.source])} {op} toString(%({f_k})s)", f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_id, schemas.FilterType.user_id_mobile]: + elif filter_type in [schemas.FilterType.USER_ID, schemas.FilterType.USER_ID_MOBILE]: if is_any: extra_constraints.append('isNotNull(s.user_id)') ss_constraints.append('isNotNull(ms.user_id)')
@@ -788,8 +788,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu ss_constraints.append( _multiple_conditions(f"ms.user_id {op} toString(%({f_k})s)", f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.user_anonymous_id, - schemas.FilterType.user_anonymous_id_mobile]: + elif filter_type in [schemas.FilterType.USER_ANONYMOUS_ID, + schemas.FilterType.USER_ANONYMOUS_ID_MOBILE]: if is_any: extra_constraints.append('isNotNull(s.user_anonymous_id)') ss_constraints.append('isNotNull(ms.user_anonymous_id)')
@@ -803,7 +803,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu ss_constraints.append( _multiple_conditions(f"ms.user_anonymous_id {op} toString(%({f_k})s)", f.value, is_not=is_not, value_key=f_k)) - elif filter_type in [schemas.FilterType.rev_id, schemas.FilterType.rev_id_mobile]: + elif filter_type in [schemas.FilterType.REV_ID, schemas.FilterType.REV_ID_MOBILE]: if is_any: extra_constraints.append('isNotNull(s.rev_id)') ss_constraints.append('isNotNull(ms.rev_id)')
@@ -817,7 +817,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu ss_constraints.append( _multiple_conditions(f"ms.rev_id {op} toString(%({f_k})s)", f.value, is_not=is_not, value_key=f_k)) - elif filter_type == schemas.FilterType.platform: + elif filter_type == schemas.FilterType.PLATFORM: # op = __get_sql_operator(f.operator) extra_constraints.append( _multiple_conditions(f"s.user_device_type {op} %({f_k})s", f.value, is_not=is_not,
@@ -825,7 +825,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu ss_constraints.append( _multiple_conditions(f"ms.user_device_type {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k)) - elif filter_type == schemas.FilterType.issue: + elif filter_type == schemas.FilterType.ISSUE: if is_any: extra_constraints.append("notEmpty(s.issue_types)") ss_constraints.append("notEmpty(ms.issue_types)")
@@ -842,7 +842,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu if is_not: extra_constraints[-1] = f"not({extra_constraints[-1]})" ss_constraints[-1] = f"not({ss_constraints[-1]})" - elif filter_type == schemas.FilterType.events_count: + elif filter_type == schemas.FilterType.EVENTS_COUNT: extra_constraints.append( _multiple_conditions(f"s.events_count {op} %({f_k})s", f.value, is_not=is_not, value_key=f_k))
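Note: every filter branch above funnels into _multiple_conditions(). Its definition is outside this diff, so the following is an assumption-labeled sketch of the pattern it implements: expand one parameterized condition per value and OR them together (the companion _multiple_values() presumably binds the matching f_value{i}_{j} keys into full_args):

    def _multiple_conditions(condition: str, values: list, is_not: bool = False,
                             value_key: str = "value") -> str:
        # One "%(f_value0_0)s"-style placeholder per value, then OR-joined.
        parts = [condition.replace(f"%({value_key})s", f"%({value_key}_{i})s")
                 for i in range(len(values))]
        joined = "(" + " OR ".join(parts) + ")"
        return f"NOT {joined}" if is_not else joined

    print(_multiple_conditions("s.user_browser = %(f_value0)s",
                               ["Chrome", "Firefox"], value_key="f_value0"))
    # (s.user_browser = %(f_value0_0)s OR s.user_browser = %(f_value0_1)s)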
@@ -871,7 +871,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu events_conditions = [] events_conditions_not = [] event_index = 0 - or_events = data.events_order == schemas.SearchEventOrder._or + or_events = data.events_order == schemas.SearchEventOrder.OR for i, event in enumerate(data.events): event_type = event.type is_any = _isAny_opreator(event.operator)
@@ -915,7 +915,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu event_where.append(f"main.event_type='{__get_event_type(event_type, platform=platform)}'") events_conditions.append({"type": event_where[-1]}) if not is_any: - if event.operator == schemas.ClickEventExtraOperator._on_selector: + if event.operator == schemas.ClickEventExtraOperator.ON_SELECTOR: event_where.append( _multiple_conditions(f"main.selector = %({e_k})s", event.value, value_key=e_k)) events_conditions[-1]["condition"] = event_where[-1]
@@ -1093,7 +1093,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"]) - elif event_type == schemas.PerformanceEventType.fetch_failed: + elif event_type == schemas.PerformanceEventType.FETCH_FAILED: event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main " _column = 'url_path' event_where.append(f"main.event_type='{__get_event_type(event_type, platform=platform)}'")
@@ -1131,9 +1131,9 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu # _multiple_conditions(f"{tname}.{colname} {event.sourceOperator} %({e_k})s", # event.source, value_key=e_k)) # TODO: isNot for PerformanceEvent - elif event_type in [schemas.PerformanceEventType.location_dom_complete, - schemas.PerformanceEventType.location_largest_contentful_paint_time, - schemas.PerformanceEventType.location_ttfb]: + elif event_type in [schemas.PerformanceEventType.LOCATION_DOM_COMPLETE, + schemas.PerformanceEventType.LOCATION_LARGEST_CONTENTFUL_PAINT_TIME, + schemas.PerformanceEventType.LOCATION_TTFB]: event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main " event_where.append(f"main.event_type='{__get_event_type(event_type, platform=platform)}'") events_conditions.append({"type": event_where[-1]})
@@ -1155,8 +1155,8 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu events_conditions[-1]["condition"].append(event_where[-1]) events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"]) # TODO: isNot for PerformanceEvent - elif event_type in [schemas.PerformanceEventType.location_avg_cpu_load, - schemas.PerformanceEventType.location_avg_memory_usage]: + elif event_type in [schemas.PerformanceEventType.LOCATION_AVG_CPU_LOAD, + schemas.PerformanceEventType.LOCATION_AVG_MEMORY_USAGE]: event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main " event_where.append(f"main.event_type='{__get_event_type(event_type, platform=platform)}'") events_conditions.append({"type": event_where[-1]})
@@ -1229,7 +1229,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu # event.source, value_key=e_k) # event_index += 1 # TODO: no isNot for RequestDetails - elif event_type == schemas.EventType.request_details: + elif event_type == schemas.EventType.REQUEST_DETAILS: event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main " event_where.append(f"main.event_type='{__get_event_type(event_type, platform=platform)}'") events_conditions.append({"type": event_where[-1]})
@@ -1243,35 +1243,35 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu op = __get_sql_operator(f.operator) e_k_f = e_k + f"_fetch{j}" full_args = {**full_args, **_multiple_values(f.value, value_key=e_k_f)} - if f.type == schemas.FetchFilterType._url: + if f.type == schemas.FetchFilterType.FETCH_URL: event_where.append( _multiple_conditions(f"main.url_path {op} %({e_k_f})s", f.value, value_key=e_k_f)) events_conditions[-1]["condition"].append(event_where[-1]) apply = True - elif f.type == schemas.FetchFilterType._status_code: + elif f.type == schemas.FetchFilterType.FETCH_STATUS_CODE: event_where.append( _multiple_conditions(f"main.status {f.operator} %({e_k_f})s", f.value, value_key=e_k_f)) events_conditions[-1]["condition"].append(event_where[-1]) apply = True - elif f.type == schemas.FetchFilterType._method: + elif f.type == schemas.FetchFilterType.FETCH_METHOD: event_where.append( _multiple_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f)) events_conditions[-1]["condition"].append(event_where[-1]) apply = True - elif f.type == schemas.FetchFilterType._duration: + elif f.type == schemas.FetchFilterType.FETCH_DURATION: event_where.append( _multiple_conditions(f"main.duration {f.operator} %({e_k_f})s", f.value, value_key=e_k_f)) events_conditions[-1]["condition"].append(event_where[-1]) apply = True - elif f.type == schemas.FetchFilterType._request_body: + elif f.type == schemas.FetchFilterType.FETCH_REQUEST_BODY: event_where.append( _multiple_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f)) events_conditions[-1]["condition"].append(event_where[-1]) apply = True - elif f.type == schemas.FetchFilterType._response_body: + elif f.type == schemas.FetchFilterType.FETCH_RESPONSE_BODY: event_where.append( _multiple_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f)) events_conditions[-1]["condition"].append(event_where[-1])
@@ -1283,7 +1283,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu else: events_conditions[-1]["condition"] = " AND ".join(events_conditions[-1]["condition"]) # TODO: no isNot for GraphQL - elif event_type == schemas.EventType.graphql: + elif event_type == schemas.EventType.GRAPHQL: event_from = event_from % f"{MAIN_EVENTS_TABLE} AS main " event_where.append(f"main.event_type='GRAPHQL'") events_conditions.append({"type": event_where[-1]})
@@ -1296,20 +1296,20 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu op = __get_sql_operator(f.operator) e_k_f = e_k + f"_graphql{j}" full_args = {**full_args, **_multiple_values(f.value, value_key=e_k_f)} - if f.type == schemas.GraphqlFilterType._name: + if f.type == schemas.GraphqlFilterType.GRAPHQL_NAME: event_where.append( _multiple_conditions(f"main.{events.EventType.GRAPHQL.column} {op} %({e_k_f})s", f.value, value_key=e_k_f)) events_conditions[-1]["condition"].append(event_where[-1]) - elif f.type == schemas.GraphqlFilterType._method: + elif f.type == schemas.GraphqlFilterType.GRAPHQL_METHOD: event_where.append( _multiple_conditions(f"main.method {op} %({e_k_f})s", f.value, value_key=e_k_f)) events_conditions[-1]["condition"].append(event_where[-1]) - elif f.type == schemas.GraphqlFilterType._request_body: + elif f.type == schemas.GraphqlFilterType.GRAPHQL_REQUEST_BODY: event_where.append( _multiple_conditions(f"main.request_body {op} %({e_k_f})s", f.value, value_key=e_k_f)) events_conditions[-1]["condition"].append(event_where[-1]) - elif f.type == schemas.GraphqlFilterType._response_body: + elif f.type == schemas.GraphqlFilterType.GRAPHQL_RESPONSE_BODY: event_where.append( _multiple_conditions(f"main.response_body {op} %({e_k_f})s", f.value, value_key=e_k_f)) events_conditions[-1]["condition"].append(event_where[-1])
@@ -1346,7 +1346,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu ) AS event_{event_index} {"ON(TRUE)" if event_index > 0 else ""}\ """) else: - if data.events_order == schemas.SearchEventOrder._then: + if data.events_order == schemas.SearchEventOrder.THEN: pass else: events_query_from.append(f"""\
@@ -1358,10 +1358,10 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu """) event_index += 1 # limit THEN-events to 7 in CH because sequenceMatch cannot take more arguments - if event_index == 7 and data.events_order == schemas.SearchEventOrder._then: + if event_index == 7 and data.events_order == schemas.SearchEventOrder.THEN: break if event_index < 2: - data.events_order = schemas.SearchEventOrder._or + data.events_order = schemas.SearchEventOrder.OR if len(events_extra_join) > 0: if event_index < 2: events_extra_join = f"INNER JOIN ({events_extra_join}) AS main1 USING(error_id)"
@@ -1372,7 +1372,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu FROM {exp_ch_helper.get_user_favorite_sessions_table()} AS user_favorite_sessions WHERE user_id = %(userId)s)""") - if data.events_order in [schemas.SearchEventOrder._then, schemas.SearchEventOrder._and]: + if data.events_order in [schemas.SearchEventOrder.THEN, schemas.SearchEventOrder.AND]: sequence_pattern = [f'(?{i + 1}){c.get("time", "")}' for i, c in enumerate(events_conditions)] sub_join = "" type_conditions = []
@@ -1417,7 +1417,7 @@ def search_query_parts_ch(data: schemas.SessionsSearchPayloadSchema, error_statu del _value_conditions_not del value_conditions_not - if data.events_order == schemas.SearchEventOrder._then: + if data.events_order == schemas.SearchEventOrder.THEN: having = f"""HAVING sequenceMatch('{''.join(sequence_pattern)}')(main.datetime,{','.join(sequence_conditions)})""" else: having = f"""HAVING {" AND ".join([f"countIf({c})>0" for c in list(set(sequence_conditions))])}"""
@@ -1598,8 +1598,8 @@ def search_by_metadata(tenant_id, user_id, m_key, m_value, project_id=None): available_keys = metadata.get_keys_by_projects(project_ids) for i in available_keys: - available_keys[i]["user_id"] = schemas.FilterType.user_id - available_keys[i]["user_anonymous_id"] = schemas.FilterType.user_anonymous_id + available_keys[i]["user_id"] = schemas.FilterType.USER_ID + available_keys[i]["user_anonymous_id"] = schemas.FilterType.USER_ANONYMOUS_ID results = {} for i in project_ids: if m_key not in available_keys[i].values():
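Note: the THEN/AND branches above compile the per-event conditions into a ClickHouse sequenceMatch() call, and the hunks also cap THEN chains at 7 events because sequenceMatch cannot take more arguments. A hedged, self-contained sketch of the pattern assembly (the condition strings are illustrative, not taken from the patch):

    # Each event contributes '(?N)'; an optional time constraint such as
    # '(?t<10000)' is appended after its event and restricts the gap to the next one.
    events_conditions = [
        {"type": "main.event_type='CLICK'", "time": "(?t<10000)"},
        {"type": "main.event_type='LOCATION'"},
    ]
    sequence_pattern = [f'(?{i + 1}){c.get("time", "")}'
                        for i, c in enumerate(events_conditions)]
    having = (f"HAVING sequenceMatch('{''.join(sequence_pattern)}')"
              f"(main.datetime,{','.join(c['type'] for c in events_conditions)})")
    print(having)
    # HAVING sequenceMatch('(?1)(?t<10000)(?2)')(main.datetime,main.event_type='CLICK',main.event_type='LOCATION')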
diff --git a/ee/api/chalicelib/core/sessions_insights.py b/ee/api/chalicelib/core/sessions_insights.py
index 5bb433f1cf..c1b7d00a37 100644
--- a/ee/api/chalicelib/core/sessions_insights.py
+++ b/ee/api/chalicelib/core/sessions_insights.py
@@ -158,7 +158,7 @@ def query_requests_by_period(project_id, start_time, end_time, filters: Optional for n in names_: if n is None: continue - data_ = {'category': schemas.InsightCategories.network, 'name': n, + data_ = {'category': schemas.InsightCategories.NETWORK, 'name': n, 'value': None, 'oldValue': None, 'ratio': None, 'change': None, 'isNew': True} for n_, v in ratio: if n == n_:
@@ -258,7 +258,7 @@ def query_most_errors_by_period(project_id, start_time, end_time, for n in names_: if n is None: continue - data_ = {'category': schemas.InsightCategories.errors, 'name': n, + data_ = {'category': schemas.InsightCategories.ERRORS, 'name': n, 'value': None, 'oldValue': None, 'ratio': None, 'change': None, 'isNew': True} for n_, v in ratio: if n == n_:
@@ -338,7 +338,7 @@ def query_cpu_memory_by_period(project_id, start_time, end_time, output = list() if cpu_oldvalue is not None or cpu_newvalue is not None: - output.append({'category': schemas.InsightCategories.resources, + output.append({'category': schemas.InsightCategories.RESOURCES, 'name': 'cpu', 'value': cpu_newvalue, 'oldValue': cpu_oldvalue,
@@ -346,7 +346,7 @@ def query_cpu_memory_by_period(project_id, start_time, end_time, cpu_newvalue - cpu_oldvalue) / cpu_oldvalue if cpu_ratio is not None else cpu_ratio, 'isNew': True if cpu_newvalue is not None and cpu_oldvalue is None else False}) if mem_oldvalue is not None or mem_newvalue is not None: - output.append({'category': schemas.InsightCategories.resources, + output.append({'category': schemas.InsightCategories.RESOURCES, 'name': 'memory', 'value': mem_newvalue, 'oldValue': mem_oldvalue,
@@ -423,7 +423,7 @@ def query_click_rage_by_period(project_id, start_time, end_time, for n in names_: if n is None: continue - data_ = {'category': schemas.InsightCategories.rage, 'name': n, + data_ = {'category': schemas.InsightCategories.RAGE, 'name': n, 'value': None, 'oldValue': None, 'ratio': None, 'change': None, 'isNew': True} for n_, v in ratio: if n == n_:
@@ -452,16 +452,16 @@ def fetch_selected(project_id, data: schemas.GetInsightsSchema): if len(data.series) > 0: filters = data.series[0].filter - if schemas.InsightCategories.errors in data.metricValue: + if schemas.InsightCategories.ERRORS in data.metricValue: output += query_most_errors_by_period(project_id=project_id, start_time=data.startTimestamp, end_time=data.endTimestamp, filters=filters) - if schemas.InsightCategories.network in data.metricValue: + if schemas.InsightCategories.NETWORK in data.metricValue: output += query_requests_by_period(project_id=project_id, start_time=data.startTimestamp, end_time=data.endTimestamp, filters=filters) - if schemas.InsightCategories.rage in data.metricValue: + if schemas.InsightCategories.RAGE in data.metricValue: output += query_click_rage_by_period(project_id=project_id, start_time=data.startTimestamp, end_time=data.endTimestamp, filters=filters) - if schemas.InsightCategories.resources in data.metricValue: + if schemas.InsightCategories.RESOURCES in data.metricValue: output += query_cpu_memory_by_period(project_id=project_id, start_time=data.startTimestamp, end_time=data.endTimestamp, filters=filters) return output
diff --git a/ee/api/chalicelib/core/sessions_metas.py b/ee/api/chalicelib/core/sessions_metas.py
index 782f68ec84..97907768bb 100644
--- a/ee/api/chalicelib/core/sessions_metas.py
+++ b/ee/api/chalicelib/core/sessions_metas.py
@@ -8,65 +8,65 @@ from . import autocomplete as autocomplete SUPPORTED_TYPES = { - schemas.FilterType.user_os: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os)), - schemas.FilterType.user_browser: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_browser), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_browser)), - schemas.FilterType.user_device: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_device), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_device)), - schemas.FilterType.user_country: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country)), - schemas.FilterType.user_city: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_city), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_city)), - schemas.FilterType.user_state: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_state), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_state)), - schemas.FilterType.user_id: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id)), - schemas.FilterType.user_anonymous_id: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id)), - schemas.FilterType.rev_id: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id)), - schemas.FilterType.referrer: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.referrer), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.referrer)), - schemas.FilterType.utm_campaign: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_campaign), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_campaign)), - schemas.FilterType.utm_medium: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_medium), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_medium)), - schemas.FilterType.utm_source: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_source), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.utm_source)), + schemas.FilterType.USER_OS: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS)), + schemas.FilterType.USER_BROWSER: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_BROWSER), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_BROWSER)), + schemas.FilterType.USER_DEVICE: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_DEVICE), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_DEVICE)), + schemas.FilterType.USER_COUNTRY: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_COUNTRY), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_COUNTRY)), + schemas.FilterType.USER_CITY: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_CITY), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_CITY)), + schemas.FilterType.USER_STATE: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_STATE), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_STATE)), + schemas.FilterType.USER_ID: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ID), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ID)), + schemas.FilterType.USER_ANONYMOUS_ID: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ANONYMOUS_ID), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ANONYMOUS_ID)), + schemas.FilterType.REV_ID: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REV_ID), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REV_ID)), + schemas.FilterType.REFERRER: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REFERRER), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REFERRER)), + schemas.FilterType.UTM_CAMPAIGN: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_CAMPAIGN), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_CAMPAIGN)), + schemas.FilterType.UTM_MEDIUM: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_MEDIUM), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_MEDIUM)), + schemas.FilterType.UTM_SOURCE: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_SOURCE), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.UTM_SOURCE)), # MOBILE - schemas.FilterType.user_os_mobile: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os_mobile), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_os_mobile)), - schemas.FilterType.user_device_mobile: SupportedFilter( + schemas.FilterType.USER_OS_MOBILE: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS_MOBILE), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_OS_MOBILE)), + schemas.FilterType.USER_DEVICE_MOBILE: SupportedFilter( get=autocomplete.__generic_autocomplete_metas( - typename=schemas.FilterType.user_device_mobile), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_device_mobile)), - schemas.FilterType.user_country_mobile: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country_mobile), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_country_mobile)), - schemas.FilterType.user_id_mobile: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id_mobile), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_id_mobile)), - schemas.FilterType.user_anonymous_id_mobile: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id_mobile), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.user_anonymous_id_mobile)), - schemas.FilterType.rev_id_mobile: SupportedFilter( - get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id_mobile), - query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.rev_id_mobile)), + typename=schemas.FilterType.USER_DEVICE_MOBILE), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_DEVICE_MOBILE)), + schemas.FilterType.USER_COUNTRY_MOBILE: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_COUNTRY_MOBILE), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_COUNTRY_MOBILE)), + schemas.FilterType.USER_ID_MOBILE: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ID_MOBILE), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ID_MOBILE)), + schemas.FilterType.USER_ANONYMOUS_ID_MOBILE: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ANONYMOUS_ID_MOBILE), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.USER_ANONYMOUS_ID_MOBILE)), + schemas.FilterType.REV_ID_MOBILE: SupportedFilter( + get=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REV_ID_MOBILE), + query=autocomplete.__generic_autocomplete_metas(typename=schemas.FilterType.REV_ID_MOBILE)), }
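Note: SUPPORTED_TYPES maps each FilterType member to a SupportedFilter whose getters all come from one generic autocomplete factory, so the rename is confined to the dictionary keys and typename arguments. A hedged sketch of that dispatch pattern (SupportedFilter and the factory live elsewhere; the names and the "userBrowser" value are assumptions):

    from dataclasses import dataclass
    from typing import Callable

    @dataclass
    class SupportedFilter:
        get: Callable[..., list]
        query: Callable[..., list]

    def generic_autocomplete(typename: str) -> Callable[..., list]:
        def search(project_id: int, text: str) -> list:
            # The real helper parameterizes one shared SQL query by `typename`.
            return [f"{typename}:{text}"]
        return search

    SUPPORTED_TYPES = {
        "userBrowser": SupportedFilter(get=generic_autocomplete("userBrowser"),
                                       query=generic_autocomplete("userBrowser")),
    }

    # Callers can then dispatch without a per-type if/elif ladder:
    print(SUPPORTED_TYPES["userBrowser"].get(1, "Chrom"))  # ['userBrowser:Chrom']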
diff --git a/ee/api/chalicelib/core/traces.py b/ee/api/chalicelib/core/traces.py
index 786358e1ad..04194a2379 100644
--- a/ee/api/chalicelib/core/traces.py
+++ b/ee/api/chalicelib/core/traces.py
@@ -177,7 +177,7 @@ def get_all(tenant_id, data: schemas.TrailSearchPayloadSchema): conditions.append("users.name ILIKE %(query)s") conditions.append("users.tenant_id = %(tenant_id)s") params["query"] = helper.values_for_operator(value=data.query, - op=schemas.SearchEventOperator._contains) + op=schemas.SearchEventOperator.CONTAINS) cur.execute( cur.mogrify( f"""SELECT COUNT(*) AS count,
diff --git a/ee/api/chalicelib/core/webhook.py b/ee/api/chalicelib/core/webhook.py
index 2b2d967239..548c5769b4 100644
--- a/ee/api/chalicelib/core/webhook.py
+++ b/ee/api/chalicelib/core/webhook.py
@@ -114,7 +114,7 @@ def add(tenant_id, endpoint, auth_header=None, webhook_type='webhook', name="", def exists_by_name(tenant_id: int, name: str, exclude_id: Optional[int], - webhook_type: str = schemas.WebhookType.webhook) -> bool: + webhook_type: str = schemas.WebhookType.WEBHOOK) -> bool: with pg_client.PostgresClient() as cur: query = cur.mogrify(f"""SELECT EXISTS(SELECT 1 FROM public.webhooks
diff --git a/ee/api/routers/core_dynamic.py b/ee/api/routers/core_dynamic.py
index c61bb4970f..90e9f87013 100644
--- a/ee/api/routers/core_dynamic.py
+++ b/ee/api/routers/core_dynamic.py
@@ -235,7 +235,7 @@ def get_projects(context: schemas.CurrentContext = Depends(OR_context)): # for backward compatibility @app.get('/{projectId}/sessions/{sessionId}', tags=["sessions", "replay"], - dependencies=[OR_scope(Permissions.session_replay, ServicePermissions.session_replay)]) + dependencies=[OR_scope(Permissions.SESSION_REPLAY, ServicePermissions.SESSION_REPLAY)]) def get_session(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, context: schemas.CurrentContext = Depends(OR_context)): if not sessionId.isnumeric():
@@ -255,7 +255,7 @@ def get_session(projectId: int, sessionId: Union[int, str], background_tasks: Ba @app.post('/{projectId}/sessions/search', tags=["sessions"], - dependencies=[OR_scope(Permissions.session_replay)]) + dependencies=[OR_scope(Permissions.SESSION_REPLAY)]) def sessions_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id,
@@ -264,7 +264,7 @@ def sessions_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = @app.post('/{projectId}/sessions/search/ids', tags=["sessions"], - dependencies=[OR_scope(Permissions.session_replay)]) + dependencies=[OR_scope(Permissions.SESSION_REPLAY)]) def session_ids_search(projectId: int, data: schemas.SessionsSearchPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = sessions.search_sessions(data=data, project_id=projectId, user_id=context.user_id, ids_only=True,
@@ -273,7 +273,7 @@ def session_ids_search(projectId: int, data: schemas.SessionsSearchPayloadSchema @app.get('/{projectId}/sessions/{sessionId}/first-mob', tags=["sessions", "replay"], - dependencies=[OR_scope(Permissions.session_replay, ServicePermissions.session_replay)]) + dependencies=[OR_scope(Permissions.SESSION_REPLAY, ServicePermissions.SESSION_REPLAY)]) def get_first_mob_file(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, context: schemas.CurrentContext = Depends(OR_context)): if not sessionId.isnumeric():
@@ -289,7 +289,7 @@ def get_first_mob_file(projectId: int, sessionId: Union[int, str], background_ta @app.get('/{projectId}/sessions/{sessionId}/replay', tags=["sessions", "replay"], - dependencies=[OR_scope(Permissions.session_replay, ServicePermissions.session_replay)]) + dependencies=[OR_scope(Permissions.SESSION_REPLAY, ServicePermissions.SESSION_REPLAY)]) def get_session_events(projectId: int, sessionId: Union[int, str], background_tasks: BackgroundTasks, context: schemas.CurrentContext = Depends(OR_context)): if not sessionId.isnumeric():
@@ -309,7 +309,7 @@ def get_session_events(projectId: int, sessionId: Union[int, str], background_ta @app.get('/{projectId}/sessions/{sessionId}/events', tags=["sessions", "replay"], - dependencies=[OR_scope(Permissions.session_replay, ServicePermissions.session_replay)]) + dependencies=[OR_scope(Permissions.SESSION_REPLAY, ServicePermissions.SESSION_REPLAY)]) def get_session_events(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): if not sessionId.isnumeric():
@@ -326,7 +326,7 @@ def get_session_events(projectId: int, sessionId: Union[int, str], @app.get('/{projectId}/sessions/{sessionId}/errors/{errorId}/sourcemaps', tags=["sessions", "sourcemaps"], - dependencies=[OR_scope(Permissions.dev_tools)]) + dependencies=[OR_scope(Permissions.DEV_TOOLS)]) def get_error_trace(projectId: int, sessionId: int, errorId: str, context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_trace(project_id=projectId, error_id=errorId)
@@ -337,7 +337,7 @@ def get_error_trace(projectId: int, sessionId: int, errorId: str, } -@app.get('/{projectId}/errors/{errorId}', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)]) +@app.get('/{projectId}/errors/{errorId}', tags=['errors'], dependencies=[OR_scope(Permissions.DEV_TOOLS)]) def errors_get_details(projectId: int, errorId: str, background_tasks: BackgroundTasks, density24: int = 24, density30: int = 30, context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_details(project_id=projectId, user_id=context.user_id, error_id=errorId,
@@ -348,7 +348,7 @@ def errors_get_details(projectId: int, errorId: str, background_tasks: Backgroun return data -@app.get('/{projectId}/errors/{errorId}/sourcemaps', tags=['errors'], dependencies=[OR_scope(Permissions.dev_tools)]) +@app.get('/{projectId}/errors/{errorId}/sourcemaps', tags=['errors'], dependencies=[OR_scope(Permissions.DEV_TOOLS)]) def errors_get_details_sourcemaps(projectId: int, errorId: str, context: schemas.CurrentContext = Depends(OR_context)): data = errors.get_trace(project_id=projectId, error_id=errorId)
@@ -359,7 +359,7 @@ def errors_get_details_sourcemaps(projectId: int, errorId: str, } -@app.get('/{projectId}/errors/{errorId}/{action}', tags=["errors"], dependencies=[OR_scope(Permissions.dev_tools)]) +@app.get('/{projectId}/errors/{errorId}/{action}', tags=["errors"], dependencies=[OR_scope(Permissions.DEV_TOOLS)]) def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDate: int = TimeUTC.now(-7), endDate: int = TimeUTC.now(), context: schemas.CurrentContext = Depends(OR_context)):
@@ -378,7 +378,7 @@ def add_remove_favorite_error(projectId: int, errorId: str, action: str, startDa @app.get('/{projectId}/assist/sessions/{sessionId}', tags=["assist"], - dependencies=[OR_scope(Permissions.assist_live, ServicePermissions.assist_live)]) + dependencies=[OR_scope(Permissions.ASSIST_LIVE, ServicePermissions.ASSIST_LIVE)]) def get_live_session(projectId: int, sessionId: str, background_tasks: BackgroundTasks, context: schemas.CurrentContext = Depends(OR_context)): data = assist.get_live_session_by_id(project_id=projectId, session_id=sessionId)
@@ -394,8 +394,8 @@ def get_live_session(projectId: int, sessionId: str, background_tasks: Backgroun @app.get('/{projectId}/unprocessed/{sessionId}/dom.mob', tags=["assist"], - dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay, - ServicePermissions.assist_live, ServicePermissions.session_replay)]) + dependencies=[OR_scope(Permissions.ASSIST_LIVE, Permissions.SESSION_REPLAY, + ServicePermissions.ASSIST_LIVE, ServicePermissions.SESSION_REPLAY)]) def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): not_found = {"errors": ["Replay file not found"]}
@@ -417,9 +417,9 @@ def get_live_session_replay_file(projectId: int, sessionId: Union[int, str], @app.get('/{projectId}/unprocessed/{sessionId}/devtools.mob', tags=["assist"], - dependencies=[OR_scope(Permissions.assist_live, Permissions.session_replay, Permissions.dev_tools, - ServicePermissions.assist_live, ServicePermissions.session_replay, - ServicePermissions.dev_tools)]) + dependencies=[OR_scope(Permissions.ASSIST_LIVE, Permissions.SESSION_REPLAY, Permissions.DEV_TOOLS, + ServicePermissions.ASSIST_LIVE, ServicePermissions.SESSION_REPLAY, + ServicePermissions.DEV_TOOLS)]) def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str], context: schemas.CurrentContext = Depends(OR_context)): not_found = {"errors": ["Devtools file not found"]}
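Note: endpoints pass several permissions to OR_scope at once (for example Permissions.SESSION_REPLAY together with ServicePermissions.SESSION_REPLAY), which suggests that holding any one of them suffices. OR_scope itself is defined in or_dependencies, outside this diff, so the following is only an assumption-labeled sketch of that semantics:

    class Forbidden(Exception):
        pass

    def or_scope(*required):
        def check(user_permissions: set):
            # Grant access when the caller holds ANY of the listed permissions.
            if not any(p in user_permissions for p in required):
                raise Forbidden("missing permission")
        return check

    guard = or_scope("SESSION_REPLAY", "SERVICE_SESSION_REPLAY")
    guard({"SERVICE_SESSION_REPLAY"})  # passes: the service permission alone suffices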
@@ -440,14 +440,14 @@ def get_live_session_devtools_file(projectId: int, sessionId: Union[int, str], return FileResponse(path=path, media_type="application/octet-stream") -@app.post('/{projectId}/heatmaps/url', tags=["heatmaps"], dependencies=[OR_scope(Permissions.session_replay)]) +@app.post('/{projectId}/heatmaps/url', tags=["heatmaps"], dependencies=[OR_scope(Permissions.SESSION_REPLAY)]) def get_heatmaps_by_url(projectId: int, data: schemas.GetHeatMapPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return {"data": heatmaps.get_by_url(project_id=projectId, data=data)} @app.post('/{projectId}/sessions/{sessionId}/heatmaps', tags=["heatmaps"], - dependencies=[OR_scope(Permissions.session_replay)]) + dependencies=[OR_scope(Permissions.SESSION_REPLAY)]) def get_heatmaps_by_session_id_url(projectId: int, sessionId: int, data: schemas.GetHeatMapPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)):
@@ -455,7 +455,7 @@ def get_heatmaps_by_session_id_url(projectId: int, sessionId: int, @app.post('/{projectId}/sessions/{sessionId}/clickmaps', tags=["heatmaps"], - dependencies=[OR_scope(Permissions.session_replay, ServicePermissions.session_replay)]) + dependencies=[OR_scope(Permissions.SESSION_REPLAY, ServicePermissions.SESSION_REPLAY)]) def get_clickmaps_by_session_id_url(projectId: int, sessionId: int, data: schemas.GetClickMapPayloadSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)):
@@ -463,14 +463,14 @@ def get_clickmaps_by_session_id_url(projectId: int, sessionId: int, @app.get('/{projectId}/sessions/{sessionId}/favorite', tags=["sessions"], - dependencies=[OR_scope(Permissions.session_replay)]) + dependencies=[OR_scope(Permissions.SESSION_REPLAY)]) def add_remove_favorite_session2(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)): return sessions_favorite.favorite_session(context=context, project_id=projectId, session_id=sessionId) @app.get('/{projectId}/sessions/{sessionId}/assign', tags=["sessions"], - dependencies=[OR_scope(Permissions.session_replay)]) + dependencies=[OR_scope(Permissions.SESSION_REPLAY)]) def assign_session(projectId: int, sessionId, context: schemas.CurrentContext = Depends(OR_context)): data = sessions_assignments.get_by_session(project_id=projectId, session_id=sessionId, tenant_id=context.tenant_id,
@@ -483,7 +483,7 @@ def assign_session(projectId: int, sessionId, context: schemas.CurrentContext = @app.get('/{projectId}/sessions/{sessionId}/assign/{issueId}', tags=["sessions", "issueTracking"], - dependencies=[OR_scope(Permissions.session_replay)]) + dependencies=[OR_scope(Permissions.SESSION_REPLAY)]) def assign_session(projectId: int, sessionId: int, issueId: str, context: schemas.CurrentContext = Depends(OR_context)): data = sessions_assignments.get(project_id=projectId, session_id=sessionId, assignment_id=issueId,
@@ -496,7 +496,7 @@ def assign_session(projectId: int, sessionId: int, issueId: str, @app.post('/{projectId}/sessions/{sessionId}/assign/{issueId}/comment', tags=["sessions", "issueTracking"], - dependencies=[OR_scope(Permissions.session_replay)]) + dependencies=[OR_scope(Permissions.SESSION_REPLAY)]) def comment_assignment(projectId: int, sessionId: int, issueId: str, data: schemas.CommentAssignmentSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)):
@@ -511,7 +511,7 @@ def comment_assignment(projectId: int, sessionId: int, issueId: str, @app.post('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"], - dependencies=[OR_scope(Permissions.session_replay)]) + dependencies=[OR_scope(Permissions.SESSION_REPLAY)]) def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): if not sessions.session_exists(project_id=projectId, session_id=sessionId):
@@ -526,7 +526,7 @@ def create_note(projectId: int, sessionId: int, data: schemas.SessionNoteSchema @app.get('/{projectId}/sessions/{sessionId}/notes', tags=["sessions", "notes"], - dependencies=[OR_scope(Permissions.session_replay)]) + dependencies=[OR_scope(Permissions.SESSION_REPLAY)]) def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentContext = Depends(OR_context)): data = sessions_notes.get_session_notes(tenant_id=context.tenant_id, project_id=projectId, session_id=sessionId, user_id=context.user_id)
@@ -538,7 +538,7 @@ def get_session_notes(projectId: int, sessionId: int, context: schemas.CurrentCo @app.post('/{projectId}/notes/{noteId}', tags=["sessions", "notes"], - dependencies=[OR_scope(Permissions.session_replay)]) + dependencies=[OR_scope(Permissions.SESSION_REPLAY)]) def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = sessions_notes.edit(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
@@ -551,14 +551,14 @@ def edit_note(projectId: int, noteId: int, data: schemas.SessionUpdateNoteSchema @app.delete('/{projectId}/notes/{noteId}', tags=["sessions", "notes"], - dependencies=[OR_scope(Permissions.session_replay)]) + dependencies=[OR_scope(Permissions.SESSION_REPLAY)]) def delete_note(projectId: int, noteId: int, _=Body(None), context: schemas.CurrentContext = Depends(OR_context)): data = sessions_notes.delete(project_id=projectId, note_id=noteId) return data @app.get('/{projectId}/notes/{noteId}/slack/{webhookId}', tags=["sessions", "notes"], - dependencies=[OR_scope(Permissions.session_replay)]) + dependencies=[OR_scope(Permissions.SESSION_REPLAY)]) def share_note_to_slack(projectId: int, noteId: int, webhookId: int, context: schemas.CurrentContext = Depends(OR_context)): return sessions_notes.share_to_slack(tenant_id=context.tenant_id, project_id=projectId, user_id=context.user_id,
@@ -572,7 +572,7 @@ def share_note_to_msteams(projectId: int, noteId: int, webhookId: int, note_id=noteId, webhook_id=webhookId) -@app.post('/{projectId}/notes', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.session_replay)]) +@app.post('/{projectId}/notes', tags=["sessions", "notes"], dependencies=[OR_scope(Permissions.SESSION_REPLAY)]) def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): data = sessions_notes.get_all_notes_by_project_id(tenant_id=context.tenant_id, project_id=projectId,
@@ -583,7 +583,7 @@ def get_all_notes(projectId: int, data: schemas.SearchNoteSchema = Body(...), @app.post('/{project_id}/feature-flags/search', tags=["feature flags"], - dependencies=[OR_scope(Permissions.feature_flags)]) + dependencies=[OR_scope(Permissions.FEATURE_FLAGS)]) def search_feature_flags(project_id: int, data: schemas.SearchFlagsSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)):
@@ -591,19 +591,19 @@ def search_feature_flags(project_id: int, @app.get('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"], - dependencies=[OR_scope(Permissions.feature_flags)]) + dependencies=[OR_scope(Permissions.FEATURE_FLAGS)]) def get_feature_flag(project_id: int, feature_flag_id: int): return feature_flags.get_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id) -@app.post('/{project_id}/feature-flags', tags=["feature flags"], dependencies=[OR_scope(Permissions.feature_flags)]) +@app.post('/{project_id}/feature-flags', tags=["feature flags"], dependencies=[OR_scope(Permissions.FEATURE_FLAGS)]) def add_feature_flag(project_id: int, data: schemas.FeatureFlagSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return feature_flags.create_feature_flag(project_id=project_id, user_id=context.user_id, feature_flag_data=data) @app.put('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"], - dependencies=[OR_scope(Permissions.feature_flags)]) + dependencies=[OR_scope(Permissions.FEATURE_FLAGS)]) def update_feature_flag(project_id: int, feature_flag_id: int, data: schemas.FeatureFlagSchema = Body(...), context: schemas.CurrentContext = Depends(OR_context)): return feature_flags.update_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id,
@@ -611,13 +611,13 @@ def update_feature_flag(project_id: int, feature_flag_id: int, data: schemas.Fea @app.delete('/{project_id}/feature-flags/{feature_flag_id}', tags=["feature flags"], - dependencies=[OR_scope(Permissions.feature_flags)]) + dependencies=[OR_scope(Permissions.FEATURE_FLAGS)]) def delete_feature_flag(project_id: int, feature_flag_id: int, _=Body(None)): return {"data": feature_flags.delete_feature_flag(project_id=project_id, feature_flag_id=feature_flag_id)} @app.post('/{project_id}/feature-flags/{feature_flag_id}/status', tags=["feature flags"], - dependencies=[OR_scope(Permissions.feature_flags)]) + dependencies=[OR_scope(Permissions.FEATURE_FLAGS)]) def update_feature_flag_status(project_id: int, feature_flag_id: int, data: schemas.FeatureFlagStatus = Body(...)): return {"data": feature_flags.update_feature_flag_status(project_id=project_id, feature_flag_id=feature_flag_id,
diff --git a/ee/api/routers/subs/insights.py b/ee/api/routers/subs/insights.py
index 970bcb35f9..12438e433d 100644
--- a/ee/api/routers/subs/insights.py
+++ b/ee/api/routers/subs/insights.py
@@ -5,7 +5,7 @@ from or_dependencies import OR_scope from routers.base import get_routers -public_app, app, app_apikey = get_routers([OR_scope(schemas.Permissions.metrics)]) +public_app, app, app_apikey = get_routers([OR_scope(schemas.Permissions.METRICS)]) @app.post('/{projectId}/insights/journey', tags=["insights"])
diff --git a/ee/api/routers/subs/metrics.py b/ee/api/routers/subs/metrics.py
index 5c88568830..09fad4796a 100644
--- a/ee/api/routers/subs/metrics.py
+++ b/ee/api/routers/subs/metrics.py
@@ -7,7 +7,7 @@ from or_dependencies import OR_context, OR_scope from routers.base import get_routers -public_app, app, app_apikey = get_routers([OR_scope(schemas.Permissions.metrics)]) +public_app, app, app_apikey = get_routers([OR_scope(schemas.Permissions.METRICS)]) @app.post('/{projectId}/dashboards', tags=["dashboard"])
diff --git a/ee/api/schemas/schemas_ee.py b/ee/api/schemas/schemas_ee.py
index 31b7f06ac3..f70fa50bf2 100644
--- a/ee/api/schemas/schemas_ee.py
+++ b/ee/api/schemas/schemas_ee.py
@@ -9,20 +9,20 @@ class Permissions(str, Enum): - session_replay = "SESSION_REPLAY" - dev_tools = "DEV_TOOLS" + SESSION_REPLAY = "SESSION_REPLAY" + DEV_TOOLS = "DEV_TOOLS" # errors = "ERRORS" - metrics = "METRICS" - assist_live = "ASSIST_LIVE" - assist_call = "ASSIST_CALL" - feature_flags = "FEATURE_FLAGS" + METRICS = "METRICS" + ASSIST_LIVE = "ASSIST_LIVE" + ASSIST_CALL = "ASSIST_CALL" + FEATURE_FLAGS = "FEATURE_FLAGS" class ServicePermissions(str, Enum): - session_replay = "SERVICE_SESSION_REPLAY" - dev_tools = "SERVICE_DEV_TOOLS" - assist_live = "SERVICE_ASSIST_LIVE" - assist_call = "SERVICE_ASSIST_CALL" + SESSION_REPLAY = "SERVICE_SESSION_REPLAY" + DEV_TOOLS = "SERVICE_DEV_TOOLS" + ASSIST_LIVE = "SERVICE_ASSIST_LIVE" + ASSIST_CALL = "SERVICE_ASSIST_CALL" class CurrentContext(schemas.CurrentContext):
@@ -58,10 +58,10 @@ class SignalsSchema(BaseModel): class InsightCategories(str, Enum): - errors = "errors" - network = "network" - rage = "rage" - resources = "resources" + ERRORS = "errors" + NETWORK = "network" + RAGE = "rage" + RESOURCES = "resources" class GetInsightsSchema(schemas._TimedSchema):
@@ -89,12 +89,12 @@ class TrailSearchPayloadSchema(schemas._PaginatedSchema): user_id: Optional[int] = Field(default=None) query: Optional[str] = Field(default=None) action: Optional[str] = Field(default=None) - order: schemas.SortOrderType = Field(default=schemas.SortOrderType.desc) + order: schemas.SortOrderType = Field(default=schemas.SortOrderType.DESC) @model_validator(mode="before") def transform_order(cls, values): if values.get("order") is None: - values["order"] = schemas.SortOrderType.desc + values["order"] = schemas.SortOrderType.DESC else: values["order"] = values["order"].upper() return values
@@ -146,7 +146,6 @@ class AssistRecordSearchPayloadSchema(schemas._PaginatedSchema, schemas._TimedSc query: Optional[str] = Field(default=None) order: Literal["asc", "desc"] = Field(default="desc") - # TODO: move these to schema when Insights is supported on PG class CardInsights(schemas.CardInsights): metric_value: List[InsightCategories] = Field(default=[])
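Note: because all of these are (str, Enum) classes, the UPPER_CASE member renames are wire-compatible: Pydantic validates enum fields by value, and the values ("SESSION_REPLAY", "errors", "DESC", ...) are unchanged. A minimal sketch (SortOrderType's exact values are assumed from the .upper() normalization in the validator above):

    from enum import Enum
    from pydantic import BaseModel, Field

    class SortOrderType(str, Enum):
        ASC = "ASC"
        DESC = "DESC"

    class TrailSearch(BaseModel):
        order: SortOrderType = Field(default=SortOrderType.DESC)

    # Incoming payloads keep validating by *value*, exactly as before the rename:
    print(TrailSearch(order="ASC").order)                        # SortOrderType.ASC
    print(TrailSearch().order.name, TrailSearch().order.value)   # DESC DESC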