Commit
cleanup, adds changelog
kim committed Nov 17, 2023
1 parent 06d7ad9 commit 1ad1c8f
Showing 2 changed files with 2 additions and 29 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -32,6 +32,7 @@ and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

### Changed
- Constants are no longer top level import, are now accessible through respective modules
+- `processingLevel` and `platform` are now aliased by collection concept-ids (lists of concept-ids grouped by processing level/platform are viewable in `dataset.py`), improving performance and avoiding the subquery system

------
## [v6.7.1](https://github.com/asfadmin/Discovery-asf_search/compare/v6.7.0...v6.7.1)
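For context: the changelog entry above replaces `platform`/`processingLevel` keyword subqueries with pre-computed lists of collection concept-ids. Below is a minimal sketch of that aliasing, modeled on the commented-out block removed from `translate.py` further down; the mapping contents, the `alias_platforms` helper, and the concept-ids are placeholders for illustration, not the real data in `dataset.py`.

```python
from typing import Dict, List

# Placeholder mapping in the spirit of collections_per_platform; these
# concept-ids are made up for illustration, not real CMR identifiers.
collections_per_platform: Dict[str, List[str]] = {
    'SENTINEL-1A': ['C0000000001-EXAMPLE', 'C0000000002-EXAMPLE'],
    'SENTINEL-1B': ['C0000000003-EXAMPLE'],
}

def alias_platforms(dict_opts: dict) -> dict:
    """Swap the `platform` keyword for `collections` when every requested
    platform has a known concept-id list; otherwise leave the options
    untouched so the regular platform subquery still runs."""
    platforms = dict_opts.get('platform', [])
    missing = [p for p in platforms if p.upper() not in collections_per_platform]
    if platforms and not missing:
        collections = dict_opts.setdefault('collections', [])
        for p in platforms:
            collections.extend(collections_per_platform[p.upper()])
        dict_opts.pop('platform')
    return dict_opts

# e.g. alias_platforms({'platform': ['Sentinel-1A'], 'maxResults': 5})
# -> {'maxResults': 5, 'collections': ['C0000000001-EXAMPLE', 'C0000000002-EXAMPLE']}
```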
30 changes: 1 addition & 29 deletions asf_search/CMR/translate.py
@@ -7,9 +7,7 @@
from shapely.geometry import Polygon
from shapely.geometry.base import BaseGeometry
from .field_map import field_map
-from .datasets import dataset_collections, collections_per_platform
-
-from numpy import intersect1d
+from .datasets import dataset_collections

import logging

@@ -50,32 +48,6 @@ def translate_opts(opts: ASFSearchOptions) -> list:
    if any(key in dict_opts for key in ['start', 'end', 'season']):
        dict_opts = fix_date(dict_opts)

-    # if 'dataset' in dict_opts:
-    #     if 'collections' not in dict_opts:
-    #         dict_opts['collections'] = []
-
-    #     for dataset in dict_opts['dataset']:
-    #         if collections_by_short_name := dataset_collections.get(dataset):
-    #             for concept_ids in collections_by_short_name.values():
-    #                 dict_opts['collections'].extend(concept_ids)
-    #         else:
-    #             raise ValueError(f'Could not find dataset named "{dataset}" provided for dataset keyword.')
-
-    #     dict_opts.pop('dataset')
-
-    # if 'platform' in dict_opts:
-    #     if 'collections' not in dict_opts:
-    #         dict_opts['collections'] = []
-
-    #     missing = [platform for platform in dict_opts['platform'] if collections_per_platform.get(platform) is None]
-
-    #     # collections limit platform searches, so if there are any we don't have collections for we skip this optimization
-    #     if len(missing) == 0:
-    #         for platform in dict_opts['platform']:
-    #             if (collections := collections_per_platform.get(platform.upper())):
-    #                 dict_opts['collections'].extend(collections)
-    #         print(f"optimizing for platform search {dict_opts['platform']}")
-    #         dict_opts.pop('platform')
    # convert the above parameters to a list of key/value tuples
    cmr_opts = []
    for (key, val) in dict_opts.items():
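The comment at the end of the hunk above describes the remaining step: flattening the options dict into a list of key/value tuples for the CMR query. A generic, illustrative sketch of that flattening follows; the real `translate_opts` presumably also applies the `field_map` imported above, and the list-expansion rule here is an assumption, not the library's documented behavior.

```python
from typing import Any, Dict, List, Tuple

def to_key_value_tuples(dict_opts: Dict[str, Any]) -> List[Tuple[str, Any]]:
    """Flatten an options dict into (key, value) tuples; list values are
    expanded into repeated keys (an illustrative assumption)."""
    cmr_opts: List[Tuple[str, Any]] = []
    for key, val in dict_opts.items():
        if isinstance(val, (list, tuple)):
            cmr_opts.extend((key, v) for v in val)
        else:
            cmr_opts.append((key, val))
    return cmr_opts

# e.g. to_key_value_tuples({'collections': ['C0000000001-EXAMPLE'], 'maxResults': 5})
# -> [('collections', 'C0000000001-EXAMPLE'), ('maxResults', 5)]
```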
