Merge pull request #786 from roboflow/fix/aggregate-usage-for-streams-and-photos-separately

Aggregate usage for streams and photos separately
grzegorz-roboflow authored Nov 7, 2024
2 parents e13d72c + 9664c78 commit b267e96
Showing 3 changed files with 54 additions and 28 deletions.
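
For orientation before the per-file diffs: this change keeps stream usage and photo usage in separate aggregates instead of folding them into one. In the payload_helpers.py diff below, a payload carrying a truthy `fps` value is treated as stream usage and everything else as photo usage. The following is a minimal illustrative sketch of that bucketing rule only; the function name and sample payloads are invented here, not taken from the repository.

```python
# Illustrative sketch of the bucketing rule applied in payload_helpers.py:
# a truthy "fps" field marks a stream payload, anything else is a photo payload.
from typing import Any, Dict, List, Tuple


def split_by_modality(
    payloads: List[Dict[str, Any]],
) -> Tuple[List[Dict[str, Any]], List[Dict[str, Any]]]:
    streams: List[Dict[str, Any]] = []
    photos: List[Dict[str, Any]] = []
    for payload in payloads:
        (streams if payload.get("fps") else photos).append(payload)
    return streams, photos


if __name__ == "__main__":
    usage = [
        {"resource_id": "resource1", "fps": 10, "processed_frames": 100},
        {"resource_id": "resource1", "processed_frames": 1},  # single image request
    ]
    streams, photos = split_by_modality(usage)
    print(len(streams), len(photos))  # 1 1 -- the two kinds are no longer merged
```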
9 changes: 4 additions & 5 deletions inference/usage_tracking/collector.py
@@ -1,6 +1,5 @@
import asyncio
import atexit
import datetime
import json
import mimetypes
import socket
@@ -246,9 +245,10 @@ def system_info(
hostname = ""
if dedicated_deployment_id:
hostname = f"{dedicated_deployment_id}:{hostname}"
if ip_address:
ip_address_hash_hex = sha256_hash(ip_address)
else:
hostname = sha256_hash(hostname)

if not ip_address:
try:
ip_address: str = socket.gethostbyname(socket.gethostname())
except Exception as exc:
@@ -263,8 +263,7 @@ def system_info(

if s:
s.close()

ip_address_hash_hex = sha256_hash(ip_address)
ip_address_hash_hex = sha256_hash(ip_address)

return {
"hostname": hostname,
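
The collector.py hunks above, together with the updated expectation in test_system_info_with_no_dedicated_deployment_id at the bottom of this page, show system_info reporting hashed identifiers: the expected payload contains a short hex value for "hostname" and hashlib.sha256("w.x.y.z".encode()).hexdigest()[:5] for "ip_address_hash". Below is a minimal sketch of that hashing step only, assuming the project's sha256_hash helper returns a truncated hex digest (the five-character comparison in the test suggests so); it is not the repository's implementation and omits the dedicated-deployment and socket-fallback branches shown in the diff.

```python
# Minimal sketch, not the repository's code: identifiers end up reported as
# short SHA-256 digests rather than raw values.
import hashlib


def sha256_hash(value: str, length: int = 5) -> str:
    # Assumption: a truncated hex digest, mirroring the [:5] comparison used in
    # the updated unit test expectations.
    return hashlib.sha256(value.encode()).hexdigest()[:length]


if __name__ == "__main__":
    # Placeholder inputs; the real test mocks the hostname and uses "w.x.y.z".
    print({
        "hostname": sha256_hash("some-hostname"),
        "ip_address_hash": sha256_hash("w.x.y.z"),
        "is_gpu_available": False,
    })
```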
35 changes: 20 additions & 15 deletions inference/usage_tracking/payload_helpers.py
@@ -82,17 +82,13 @@ def zip_usage_payloads(usage_payloads: List[APIKeyUsage]) -> List[APIKeyUsage]:
resource_usage_exec_session_id = (
api_key_usage_by_exec_session_id.setdefault(resource_usage_key, {})
)
if not resource_usage_payload.get("fps"):
resource_usage_exec_session_id.setdefault("", []).append(
resource_usage_payload
)
continue
exec_session_id = resource_usage_payload.get("exec_session_id", "")
resource_usage_exec_session_id.setdefault(exec_session_id, []).append(
resource_usage_payload
)

merged_exec_session_id_usage_payloads: Dict[str, APIKeyUsage] = {}
merged_exec_session_id_streams_usage_payloads: Dict[str, APIKeyUsage] = {}
merged_exec_session_id_photos_usage_payloads: Dict[str, APIKeyUsage] = {}
for (
api_key_hash,
api_key_usage_by_exec_session_id,
@@ -105,15 +101,22 @@ def zip_usage_payloads(usage_payloads: List[APIKeyUsage]) -> List[APIKeyUsage]:
exec_session_id,
usage_payloads,
) in resource_usage_exec_session_id.items():
merged_api_key_usage_payloads = (
merged_exec_session_id_usage_payloads.setdefault(
exec_session_id, {}
)
)
merged_api_key_payload = merged_api_key_usage_payloads.setdefault(
api_key_hash, {}
)
for resource_usage_payload in usage_payloads:
if resource_usage_payload.get("fps"):
merged_api_key_usage_payloads = (
merged_exec_session_id_streams_usage_payloads.setdefault(
exec_session_id, {}
)
)
else:
merged_api_key_usage_payloads = (
merged_exec_session_id_photos_usage_payloads.setdefault(
exec_session_id, {}
)
)
merged_api_key_payload = merged_api_key_usage_payloads.setdefault(
api_key_hash, {}
)
merged_resource_payload = merged_api_key_payload.setdefault(
resource_usage_key, {}
)
@@ -122,7 +125,9 @@ def zip_usage_payloads(usage_payloads: List[APIKeyUsage]) -> List[APIKeyUsage]:
resource_usage_payload,
)

zipped_payloads = list(merged_exec_session_id_usage_payloads.values())
zipped_payloads = list(
merged_exec_session_id_streams_usage_payloads.values()
) + list(merged_exec_session_id_photos_usage_payloads.values())
if system_info_payload:
system_info_api_key_hash = next(iter(system_info_payload.values()))[
"api_key_hash"
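
To restate the structure of the hunk above: instead of one merged_exec_session_id_usage_payloads accumulator, there are now two (streams and photos), each nested exec_session_id → api_key_hash → resource key via chained setdefault calls, and the final zipped list concatenates the values of both. Below is a compact, self-contained sketch of that nesting and concatenation; the payload values are invented, the resource id stands in for the resource usage key, and the project's per-resource merge step is replaced by a plain assignment.

```python
# Sketch of the two-accumulator nesting used in zip_usage_payloads after this
# change. Payloads with a truthy "fps" go to the streams accumulator, the rest
# to the photos accumulator; both are keyed exec_session_id -> api_key_hash ->
# resource id. Sample data and the final print are illustrative only.
from typing import Any, Dict, List

streams_acc: Dict[str, Dict[str, Dict[str, Dict[str, Any]]]] = {}
photos_acc: Dict[str, Dict[str, Dict[str, Dict[str, Any]]]] = {}

payloads: List[Dict[str, Any]] = [
    {"api_key_hash": "k1", "resource_id": "r1", "exec_session_id": "s1", "fps": 10},
    {"api_key_hash": "k1", "resource_id": "r1", "exec_session_id": "s1"},  # photo
]

for payload in payloads:
    accumulator = streams_acc if payload.get("fps") else photos_acc
    by_api_key = accumulator.setdefault(payload.get("exec_session_id", ""), {})
    by_resource = by_api_key.setdefault(payload["api_key_hash"], {})
    # The real code merges into any existing entry here; the sketch just stores
    # the payload as-is.
    by_resource[payload["resource_id"]] = payload

# Mirrors `zipped_payloads = list(...streams...) + list(...photos...)` above.
zipped_payloads = list(streams_acc.values()) + list(photos_acc.values())
print(len(zipped_payloads))  # 2 -- stream and photo usage stay in separate payloads
```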
38 changes: 30 additions & 8 deletions tests/inference/unit_tests/usage_tracking/test_collector.py
@@ -764,17 +764,17 @@ def test_zip_usage_payloads_with_different_exec_session_ids():
"fps": 10,
"exec_session_id": "session_2",
},
},
}
},
{
"fake_api1_hash": {
"resource2": {
"api_key_hash": "fake_api1_hash",
"resource_id": "resource2",
"timestamp_start": 1721032989934855000,
"timestamp_stop": 1721032989934855004,
"processed_frames": 2,
"exec_session_id": "session_2",
"timestamp_stop": 1721032989934855001,
"processed_frames": 1,
"exec_session_id": "session_1",
},
"resource3": {
"api_key_hash": "fake_api1_hash",
@@ -790,9 +790,9 @@ def test_zip_usage_payloads_with_different_exec_session_ids():
"api_key_hash": "fake_api2_hash",
"resource_id": "resource2",
"timestamp_start": 1721032989934856000,
"timestamp_stop": 1721032989934856004,
"processed_frames": 2,
"exec_session_id": "session_2",
"timestamp_stop": 1721032989934856001,
"processed_frames": 1,
"exec_session_id": "session_1",
},
"resource3": {
"api_key_hash": "fake_api2_hash",
@@ -804,6 +804,28 @@ def test_zip_usage_payloads_with_different_exec_session_ids():
},
},
},
{
"fake_api1_hash": {
"resource2": {
"api_key_hash": "fake_api1_hash",
"resource_id": "resource2",
"timestamp_start": 1721032989934855003,
"timestamp_stop": 1721032989934855004,
"processed_frames": 1,
"exec_session_id": "session_2",
},
},
"fake_api2_hash": {
"resource2": {
"api_key_hash": "fake_api2_hash",
"resource_id": "resource2",
"timestamp_start": 1721032989934856003,
"timestamp_stop": 1721032989934856004,
"processed_frames": 1,
"exec_session_id": "session_2",
},
},
},
]


@@ -827,7 +849,7 @@ def test_system_info_with_no_dedicated_deployment_id():

# then
expected_system_info = {
"hostname": f"hostname01",
"hostname": "5aacc",
"ip_address_hash": hashlib.sha256("w.x.y.z".encode()).hexdigest()[:5],
"is_gpu_available": False,
}
