-
Notifications
You must be signed in to change notification settings - Fork 3
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #91 from ElemarJR/last_twelve_months_view
Last twelve months view
- Loading branch information
Showing
31 changed files
with
1,050 additions
and
96 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,7 +1,8 @@ | ||
from ariadne import QueryType, ObjectType | ||
|
||
from .staleliness import resolve_staleliness | ||
|
||
from .allocation import resolve_allocation | ||
def setup_query_for_operational_summaries(query: QueryType):
    """Register the operational-summary resolvers on the root Query type.

    Args:
        query: the ariadne QueryType to attach field resolvers to.

    Returns:
        An empty list (no extra ObjectType bindings are contributed).
    """
    # Field-name -> resolver dispatch table; one registration loop keeps
    # additions to this module a single-line change.
    field_resolvers = {
        'staleliness': resolve_staleliness,
        'allocation': resolve_allocation,
    }
    for field_name, resolver in field_resolvers.items():
        query.set_field(field_name, resolver)
    return []
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,49 @@ | ||
from datetime import datetime | ||
|
||
from omni_shared import globals | ||
|
||
def resolve_allocation(root, info, start_date=None, end_date=None, filters=None):
    """GraphQL resolver: daily allocated hours, grouped by normalized kind.

    Args:
        root: GraphQL root value (unused).
        info: GraphQL resolve info (unused).
        start_date: period start as 'YYYY-MM-DD' string, datetime, or None
            (None defaults to "now").
        end_date: period end, same accepted forms as start_date.
        filters: optional filter spec forwarded to the datasets layer.

    Returns:
        dict with 'by_kind' (kind slug -> list of {'date', 'hours'} entries)
        and 'filterable_fields' from the filter application result.
    """
    def _to_datetime(value):
        # None -> current moment; ISO-date string -> parsed; datetime passes through.
        if value is None:
            return datetime.now()
        return datetime.strptime(value, '%Y-%m-%d') if isinstance(value, str) else value

    start_date = _to_datetime(start_date)
    end_date = _to_datetime(end_date)

    # Fetch the timesheet for the window, then apply the caller's filters.
    timesheet = globals.omni_datasets.timesheets.get(start_date, end_date)
    df, result = globals.omni_datasets.apply_filters(
        globals.omni_datasets.timesheets,
        timesheet.data,
        filters
    )

    # Source 'Kind' labels -> normalized output slugs; unknown kinds are dropped.
    kind_map = {
        'Consulting': 'consulting',
        'Internal': 'internal',
        'HandsOn': 'hands_on',
        'Squad': 'squad'
    }

    # Every known kind appears in the output, even with no matching rows.
    by_kind = {slug: [] for slug in kind_map.values()}

    # Single aggregation pass: total hours per (date, kind) pair.
    daily_totals = df.groupby(['Date', 'Kind'])['TimeInHs'].sum()

    for (date, kind), hours in daily_totals.items():
        slug = kind_map.get(kind)
        if slug is not None:
            by_kind[slug].append({
                'date': date.strftime('%Y-%m-%d'),
                'hours': float(hours)
            })

    return {
        'by_kind': by_kind,
        'filterable_fields': result['filterable_fields']
    }
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
4 changes: 4 additions & 0 deletions
4
backend/models/src/omni_models/datasets/timesheet_dataset/__init__.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,4 @@ | ||
from .main import TimesheetDataset | ||
from .models.memory_cache import TimesheetMemoryCache | ||
|
||
__all__ = ['TimesheetDataset', 'TimesheetMemoryCache'] |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
55 changes: 55 additions & 0 deletions
55
backend/models/src/omni_models/datasets/timesheet_dataset/models/disk_cache.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,55 @@ | ||
import pickle | ||
import base64 | ||
from pathlib import Path | ||
from cryptography.fernet import Fernet | ||
from cryptography.hazmat.primitives import hashes | ||
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC | ||
|
||
class TimesheetDiskCache:
    """Encrypted on-disk cache for timesheet datasets.

    Datasets are pickled, encrypted with a Fernet key derived from the
    API key, and stored as ``<cache_dir>/<filename>.timesheet``.
    """

    def __init__(self, cache_dir: str, api_key: str):
        self.cache_dir = Path(cache_dir)
        self.cache_dir.mkdir(parents=True, exist_ok=True)
        self.fernet = self._get_encryption_key(api_key)

    def _get_encryption_key(self, api_key: str) -> Fernet:
        """Derive a Fernet cipher from the API key via PBKDF2-HMAC-SHA256."""
        # Static salt: the key only needs to be reproducible across runs for
        # the same api_key; this is obfuscation-at-rest, not user-password KDF.
        salt = b'omni_salt'
        kdf = PBKDF2HMAC(
            algorithm=hashes.SHA256(),
            length=32,
            salt=salt,
            iterations=100000,
        )
        key = base64.urlsafe_b64encode(kdf.derive(api_key.encode()))
        return Fernet(key)

    def _path_for(self, filename: str) -> Path:
        """Cache file path for a logical entry name.

        Bug fix: save/load previously hard-coded the file name instead of
        interpolating ``filename``, so every entry collided on one file and
        the parameter was dead. Centralizing the path here also keeps save
        and load from drifting apart.
        """
        return self.cache_dir / f"{filename}.timesheet"

    def save(self, dataset, filename: str) -> None:
        """Save an encrypted timesheet dataset to file (no-op for None)."""
        if dataset is None:
            return

        filepath = self._path_for(filename)

        # Serialize and encrypt the dataset
        serialized = pickle.dumps(dataset)
        encrypted = self.fernet.encrypt(serialized)

        # Save to file
        with open(filepath, "wb") as file:
            file.write(encrypted)

    def load(self, filename: str):
        """Load an encrypted timesheet dataset from file.

        Returns None on any failure (missing file, wrong key, corrupt data) —
        callers treat that as a cache miss. NOTE: unpickling is acceptable
        here only because Fernet authenticates the ciphertext, so the payload
        must have been written by us with the same derived key.
        """
        try:
            filepath = self._path_for(filename)

            # Read encrypted data
            with open(filepath, "rb") as file:
                encrypted = file.read()

            # Decrypt and deserialize
            decrypted = self.fernet.decrypt(encrypted)
            return pickle.loads(decrypted)
        except Exception:
            # Was a bare `except:`, which also swallowed KeyboardInterrupt
            # and SystemExit; Exception keeps the best-effort semantics
            # without trapping interpreter-exit signals.
            return None
|
||
|
Oops, something went wrong.