From 197949fedeba65d63bb05a1c3f104d1e830acfcc Mon Sep 17 00:00:00 2001 From: Shane Maloney Date: Tue, 12 Mar 2024 15:42:04 +0000 Subject: [PATCH] Initial repo and code setup --- .cruft.json | 31 ++ .flake8 | 27 ++ .github/workflows/ci.yml | 97 ++++ .github/workflows/sub_package_update.yml | 84 ++++ .gitignore | 263 +++++++++++ .pre-commit-config.yaml | 22 + .readthedocs.yaml | 29 ++ .rtd-environment.yml | 7 + .ruff.toml | 39 ++ MANIFEST.in | 11 + README.rst | 43 ++ chimerapy/__init__.py | 3 + chimerapy/_dev/__init__.py | 6 + chimerapy/_dev/scm_version.py | 12 + chimerapy/chimera.py | 561 +++++++++++++++++++++++ chimerapy/data/README.rst | 6 + chimerapy/tests/__init__.py | 4 + chimerapy/tests/test_chimera.py | 17 + chimerapy/version.py | 17 + docs/Makefile | 20 + docs/conf.py | 77 ++++ docs/index.rst | 15 + docs/make.bat | 35 ++ licenses/LICENSE.rst | 25 + licenses/README.rst | 9 + licenses/TEMPLATE_LICENSE.rst | 31 ++ pyproject.toml | 90 ++++ setup.py | 4 + tox.ini | 76 +++ 29 files changed, 1661 insertions(+) create mode 100644 .cruft.json create mode 100644 .flake8 create mode 100644 .github/workflows/ci.yml create mode 100644 .github/workflows/sub_package_update.yml create mode 100644 .gitignore create mode 100644 .pre-commit-config.yaml create mode 100644 .readthedocs.yaml create mode 100644 .rtd-environment.yml create mode 100644 .ruff.toml create mode 100644 MANIFEST.in create mode 100644 README.rst create mode 100644 chimerapy/__init__.py create mode 100644 chimerapy/_dev/__init__.py create mode 100644 chimerapy/_dev/scm_version.py create mode 100644 chimerapy/chimera.py create mode 100644 chimerapy/data/README.rst create mode 100644 chimerapy/tests/__init__.py create mode 100644 chimerapy/tests/test_chimera.py create mode 100644 chimerapy/version.py create mode 100644 docs/Makefile create mode 100644 docs/conf.py create mode 100644 docs/index.rst create mode 100644 docs/make.bat create mode 100644 licenses/LICENSE.rst create mode 100644 licenses/README.rst create mode 100644 licenses/TEMPLATE_LICENSE.rst create mode 100644 pyproject.toml create mode 100755 setup.py create mode 100644 tox.ini diff --git a/.cruft.json b/.cruft.json new file mode 100644 index 0000000..2fdf8d5 --- /dev/null +++ b/.cruft.json @@ -0,0 +1,31 @@ +{ + "template": "https://github.com/sunpy/package-template", + "commit": "d7d9c32538c7bc961c06d901847d8d4f75e4d6cf", + "checkout": null, + "context": { + "cookiecutter": { + "package_name": "CHIMERApy", + "module_name": "chimerapy", + "short_description": "CHIMERApy is a python implmentation of the CHIMERA coronal hole detection algorithm.", + "author_name": "DIAS Solar", + "author_email": "", + "project_url": "chimerapy.readthedocs.io", + "license": "BSD 3-Clause", + "minimum_python_version": "3.9", + "use_compiled_extensions": "n", + "enable_dynamic_dev_versions": "y", + "include_example_code": "n", + "include_cruft_update_github_workflow": "y", + "_sphinx_theme": "alabaster", + "_parent_project": "", + "_install_requires": "", + "_copy_without_render": [ + "docs/_templates", + "docs/_static", + ".github/workflows/sub_package_update.yml" + ], + "_template": "https://github.com/sunpy/package-template" + } + }, + "directory": null +} diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..e1799e6 --- /dev/null +++ b/.flake8 @@ -0,0 +1,27 @@ +[flake8] +ignore = + # missing-whitespace-around-operator + E225 + # missing-whitespace-around-arithmetic-operator + E226 + # line-too-long + E501 + # unused-import + F401 + # undefined-local-with-import-star + F403 
+ # redefined-while-unused + F811 + # Line break occurred before a binary operator + W503, + # Line break occurred after a binary operator + W504 +max-line-length = 110 +exclude = + .git + __pycache__ + docs/conf.py + build + CHIMERApy/__init__.py, +rst-directives = + plot diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..3c3d0a3 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,97 @@ + +name: CI + +on: + push: + pull_request: + branches: + - 'main' + - '*.*' + - '!*backport*' + tags: + - 'v*' + - '!*dev*' + - '!*pre*' + - '!*post*' + # Allow manual runs through the web UI + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + core: + uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@main + with: + submodules: false + coverage: codecov + toxdeps: tox-pypi-filter + envs: | + - linux: py310 + secrets: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + + sdist_verify: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: '3.10' + - run: python -m pip install -U --user build + - run: python -m build . --sdist + - run: python -m pip install -U --user twine + - run: python -m twine check dist/* + + test: + needs: [core, sdist_verify] + uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@main + with: + submodules: false + coverage: codecov + toxdeps: tox-pypi-filter + envs: | + - windows: py310 + - macos: py310 + secrets: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + + docs: + needs: [core] + uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@main + with: + default_python: '3.10' + submodules: false + pytest: false + toxdeps: tox-pypi-filter + envs: | + - linux: build_docs + + cron: + if: | + github.event_name == 'workflow_dispatch' || ( + github.event_name == 'pull_request' && + contains(github.event.pull_request.labels.*.name, 'Run cron CI') + ) + uses: OpenAstronomy/github-actions-workflows/.github/workflows/tox.yml@main + with: + default_python: '3.10' + submodules: false + coverage: codecov + toxdeps: tox-pypi-filter + envs: | + - linux: py311-devdeps + secrets: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + + publish_pure: + needs: [test, docs] + uses: OpenAstronomy/github-actions-workflows/.github/workflows/publish_pure_python.yml@main + with: + python-version: '3.10' + test_extras: 'tests' + test_command: 'pytest -p no:warnings --doctest-rst --pyargs chimerapy' + submodules: false + secrets: + pypi_token: ${{ secrets.pypi_token }} diff --git a/.github/workflows/sub_package_update.yml b/.github/workflows/sub_package_update.yml new file mode 100644 index 0000000..94a9e7e --- /dev/null +++ b/.github/workflows/sub_package_update.yml @@ -0,0 +1,84 @@ +# This template is taken from the cruft example code, for further information please see: +# https://cruft.github.io/cruft/#automating-updates-with-github-actions +name: Automatic Update from package template +permissions: + contents: write + pull-requests: write + +on: + # Allow manual runs through the web UI + workflow_dispatch: + schedule: + # ┌───────── minute (0 - 59) + # │ ┌───────── hour (0 - 23) + # │ │ ┌───────── day of the month (1 - 31) + # │ │ │ ┌───────── month (1 - 12 or JAN-DEC) + # │ │ │ │ ┌───────── day of the week (0 - 6 or SUN-SAT) + - cron: '0 7 * * 1' # Every Monday at 7am UTC + +jobs: + update: + runs-on: ubuntu-latest + strategy: + fail-fast: true + matrix: + include: + - 
add-paths: . + body: apply the changes to this repo. + branch: cruft/update + commit-message: "Automatic package template update" + title: Updates from the package template + - add-paths: .cruft.json + body: reject these changes for this repo. + branch: cruft/reject + commit-message: "Reject this package template update" + title: Reject new updates from package template + + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Install Cruft + run: python -m pip install cruft + + - name: Check if update is available + continue-on-error: false + id: check + run: | + CHANGES=0 + if [ -f .cruft.json ]; then + if ! cruft check; then + CHANGES=1 + fi + else + echo "No .cruft.json file" + fi + + echo "has_changes=$CHANGES" >> "$GITHUB_OUTPUT" + + - name: Run update if available + if: steps.check.outputs.has_changes == '1' + run: | + git config --global user.email "${{ github.actor }}@users.noreply.github.com" + git config --global user.name "${{ github.actor }}" + + cruft update --skip-apply-ask --refresh-private-variables + git restore --staged . + + - name: Create pull request + if: steps.check.outputs.has_changes == '1' + uses: peter-evans/create-pull-request@v6 + with: + token: ${{ secrets.GITHUB_TOKEN }} + add-paths: ${{ matrix.add-paths }} + commit-message: ${{ matrix.commit-message }} + branch: ${{ matrix.branch }} + delete-branch: true + branch-suffix: timestamp + title: ${{ matrix.title }} + body: | + This is an autogenerated PR, which will ${{ matrix.body }}. + [Cruft](https://cruft.github.io/cruft/) has detected updates from the Package Template diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..6d6f1ba --- /dev/null +++ b/.gitignore @@ -0,0 +1,263 @@ +### Python: https://raw.githubusercontent.com/github/gitignore/master/Python.gitignore + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class +tmp/ + +# C extensions +*.so + +# Distribution / packaging +.Python +pip-wheel-metadata/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST +chimerapy/_version.py +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ +# automodapi +docs/api + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. 
+#Pipfile.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ + +# Pyre type checker +.pyre/ + +# IDE +# PyCharm +.idea + +# Spyder project settings +.spyderproject +.spyproject + +### VScode: https://raw.githubusercontent.com/github/gitignore/master/Global/VisualStudioCode.gitignore +.vscode/* +.vs/* + +### https://raw.github.com/github/gitignore/master/Global/OSX.gitignore +.DS_Store +.AppleDouble +.LSOverride + +# Icon must ends with two \r. +Icon + +# Thumbnails +._* + +# Files that might appear on external disk +.Spotlight-V100 +.Trashes + +### Linux: https://raw.githubusercontent.com/github/gitignore/master/Global/Linux.gitignore +*~ + +# temporary files which can be created if a process still has a handle open of a deleted file +.fuse_hidden* + +# KDE directory preferences +.directory + +# Linux trash folder which might appear on any partition or disk +.Trash-* + +# .nfs files are created when an open file is removed but is still being accessed +.nfs* + +# pytype static type analyzer +.pytype/ + +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +### Windows: https://raw.githubusercontent.com/github/gitignore/master/Global/Windows.gitignore + +# Windows thumbnail cache files +Thumbs.db +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msix +*.msm +*.msp + +# Windows shortcuts +*.lnk + +### Extra Python Items and SunPy Specific +docs/whatsnew/latest_changelog.txt +examples/**/*.csv +figure_test_images* +tags +baseline + +# Release script +.github_cache + +# Misc Stuff +.history +*.orig +.tmp +node_modules/ +package-lock.json +package.json +.prettierrc + +# Log files generated by 'vagrant up' +*.log + +.project diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..59d7541 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,22 @@ +repos: + # This should be before any formatting hooks like isort + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: "v0.2.1" + hooks: + - id: ruff + args: ["--fix"] + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: check-ast + - id: check-case-conflict + - id: trailing-whitespace + exclude: ".*(.fits|.fts|.fit|.header|.txt)$" + - id: check-yaml + - id: debug-statements + - id: check-added-large-files + args: ["--enforce-all", "--maxkb=1054"] + - id: end-of-file-fixer + exclude: ".*(.fits|.fts|.fit|.header|.txt|tca.*|.json)$|^CITATION.rst$" + - id: 
mixed-line-ending + exclude: ".*(.fits|.fts|.fit|.header|.txt|tca.*)$" diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 0000000..790abab --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,29 @@ +version: 2 + +build: + os: ubuntu-22.04 + tools: + python: "mambaforge-4.10" + jobs: + post_checkout: + - git fetch --unshallow || true + pre_install: + - git update-index --assume-unchanged .rtd-environment.yml docs/conf.py + +conda: + environment: .rtd-environment.yml + +sphinx: + builder: html + configuration: docs/conf.py + fail_on_warning: false + +formats: + - htmlzip + +python: + install: + - method: pip + extra_requirements: + - docs + path: . diff --git a/.rtd-environment.yml b/.rtd-environment.yml new file mode 100644 index 0000000..62af372 --- /dev/null +++ b/.rtd-environment.yml @@ -0,0 +1,7 @@ +name: CHIMERApy +channels: + - conda-forge +dependencies: + - python=3.10 + - pip + - graphviz!=2.42.*,!=2.43.* diff --git a/.ruff.toml b/.ruff.toml new file mode 100644 index 0000000..007308e --- /dev/null +++ b/.ruff.toml @@ -0,0 +1,39 @@ +target-version = "py39" +line-length = 110 +exclude = [ + ".git,", + "__pycache__", + "build", + "CHIMERApy/version.py", +] + +[lint] +select = ["E", "F", "W", "UP", "PT"] +extend-ignore = [ + # pycodestyle (E, W) + "E501", # LineTooLong # TODO! fix + # pytest (PT) + "PT001", # Always use pytest.fixture() + "PT004", # Fixtures which don't return anything should have leading _ + "PT007", # Parametrize should be lists of tuples # TODO! fix + "PT011", # Too broad exception assert # TODO! fix + "PT023", # Always use () on pytest decorators +] + +[flake8-tidy-imports] +[flake8-tidy-imports.banned-api] +"warnings.warn".msg = "Use sunpy specific warning helpers warn_* from sunpy.utils.exceptions" + +[per-file-ignores] +# Part of configuration, not a package. +"setup.py" = ["INP001"] +"conftest.py" = ["INP001"] +# Implicit-namespace-package. The examples are not a package. +"docs/*.py" = ["INP001"] +"__init__.py" = ["E402", "F401", "F403"] +"test_*.py" = ["B011", "D", "E402", "PGH001", "S101"] +# Need to import clients to register them, but don't use them in file +"CHIMERApy/net/__init__.py" = ["F811"] + +[pydocstyle] +convention = "numpy" diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..6eabe6b --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,11 @@ +# Exclude specific files +# All files which are tracked by git and not explicitly excluded here are included by setuptools_scm +# Prune folders +prune build +prune docs/_build +prune docs/api +global-exclude *.pyc *.o + +# This subpackage is only used in development checkouts +# and should not be included in built tarballs +prune chimerapy/_dev diff --git a/README.rst b/README.rst new file mode 100644 index 0000000..b7adafb --- /dev/null +++ b/README.rst @@ -0,0 +1,43 @@ +CHIMERApy is a python implmentation of the CHIMERA coronal hole detection algorithm. +------------------------------------------------------------------------------------ + +License +------- + +This project is Copyright (c) DIAS Solar and licensed under +the terms of the BSD 3-Clause license. This package is based upon +the `Openastronomy packaging guide `_ +which is licensed under the BSD 3-clause licence. See the licenses folder for +more information. + +Contributing +------------ + +We love contributions! CHIMERApy is open source, +built on open source, and we'd love to have you hang out in our community. + +**Imposter syndrome disclaimer**: We want your help. No, really. 
+ +There may be a little voice inside your head that is telling you that you're not +ready to be an open source contributor; that your skills aren't nearly good +enough to contribute. What could you possibly offer a project like this one? + +We assure you - the little voice in your head is wrong. If you can write code at +all, you can contribute code to open source. Contributing to open source +projects is a fantastic way to advance one's coding skills. Writing perfect code +isn't the measure of a good developer (that would disqualify all of us!); it's +trying to create something, making mistakes, and learning from those +mistakes. That's how we all improve, and we are happy to help others learn. + +Being an open source contributor doesn't just mean writing code, either. You can +help out by writing documentation, tests, or even giving feedback about the +project (and yes - that includes giving feedback about the contribution +process). Some of these contributions may be the most valuable to the project as +a whole, because you're coming to the project with fresh eyes, so you can see +the errors and assumptions that seasoned contributors have glossed over. + +Note: This disclaimer was originally written by +`Adrienne Lowe `_ for a +`PyCon talk `_, and was adapted by +CHIMERApy based on its use in the README file for the +`MetPy project `_. diff --git a/chimerapy/__init__.py b/chimerapy/__init__.py new file mode 100644 index 0000000..20b38d5 --- /dev/null +++ b/chimerapy/__init__.py @@ -0,0 +1,3 @@ +from .version import version as __version__ + +__all__ = [] diff --git a/chimerapy/_dev/__init__.py b/chimerapy/_dev/__init__.py new file mode 100644 index 0000000..72583c0 --- /dev/null +++ b/chimerapy/_dev/__init__.py @@ -0,0 +1,6 @@ +""" +This package contains utilities that are only used when developing drms in a +copy of the source repository. +These files are not installed, and should not be assumed to exist at +runtime. +""" diff --git a/chimerapy/_dev/scm_version.py b/chimerapy/_dev/scm_version.py new file mode 100644 index 0000000..b9afb1d --- /dev/null +++ b/chimerapy/_dev/scm_version.py @@ -0,0 +1,12 @@ +# Try to use setuptools_scm to get the current version; this is only used +# in development installations from the git repository. 
+import os.path + +try: + from setuptools_scm import get_version + + version = get_version(root=os.path.join("..", ".."), relative_to=__file__) +except ImportError: + raise +except Exception as e: + raise ValueError("setuptools_scm can not determine version.") from e diff --git a/chimerapy/chimera.py b/chimerapy/chimera.py new file mode 100644 index 0000000..e1b8723 --- /dev/null +++ b/chimerapy/chimera.py @@ -0,0 +1,561 @@ +""" + +""" + + +from astropy import wcs +from astropy.io import fits +from astropy.modeling.models import Gaussian2D +from skimage.util import img_as_ubyte + +import astropy.units as u + +import cv2 +import glob +import mahotas +import matplotlib.pyplot as plt +import numpy as np +import scipy.interpolate +import sunpy.map +import sys + + +def chimera_legacy(): + file_path = "./" + + im171 = glob.glob(file_path + "*171*.fts.gz") + im193 = glob.glob(file_path + "*193*.fts.gz") + im211 = glob.glob(file_path + "*211*.fts.gz") + imhmi = glob.glob(file_path + "*hmi*.fts.gz") + + if im171 == [] or im193 == [] or im211 == [] or imhmi == []: + print("Not all required files present") + sys.exit() + + # =====Reads in data and resizes images===== + x = np.arange(0, 1024) * 4 + hdu_number = 0 + heda = fits.getheader(im171[0], hdu_number) + data = fits.getdata(im171[0], ext=0) / (heda["EXPTIME"]) + dn = scipy.interpolate.interp2d(x, x, data) + data = dn(np.arange(0, 4096), np.arange(0, 4096)) + + hedb = fits.getheader(im193[0], hdu_number) + datb = fits.getdata(im193[0], ext=0) / (hedb["EXPTIME"]) + dn = scipy.interpolate.interp2d(x, x, datb) + datb = dn(np.arange(0, 4096), np.arange(0, 4096)) + + hedc = fits.getheader(im211[0], hdu_number) + datc = fits.getdata(im211[0], ext=0) / (hedc["EXPTIME"]) + dn = scipy.interpolate.interp2d(x, x, datc) + datc = dn(np.arange(0, 4096), np.arange(0, 4096)) + + hedm = fits.getheader(imhmi[0], hdu_number) + datm = fits.getdata(imhmi[0], ext=0) + # dn = scipy.interpolate.interp2d(np.arange(4096), np.arange(4096), datm) + # datm = dn(np.arange(0, 1024)*4, np.arange(0, 1024)*4) + + if hedm["crota1"] > 90: + datm = np.rot90(np.rot90(datm)) + + # =====Specifies solar radius and calculates conversion value of pixel to arcsec===== + + s = np.shape(data) + rs = heda["rsun"] + + if hedb["ctype1"] != "solar_x ": + hedb["ctype1"] = "solar_x " + hedb["ctype2"] = "solar_y " + + if heda["cdelt1"] > 1: + heda["cdelt1"], heda["cdelt2"], heda["crpix1"], heda["crpix2"] = ( + heda["cdelt1"] / 4.0, + heda["cdelt2"] / 4.0, + heda["crpix1"] * 4.0, + heda["crpix2"] * 4.0, + ) + hedb["cdelt1"], hedb["cdelt2"], hedb["crpix1"], hedb["crpix2"] = ( + hedb["cdelt1"] / 4.0, + hedb["cdelt2"] / 4.0, + hedb["crpix1"] * 4.0, + hedb["crpix2"] * 4.0, + ) + hedc["cdelt1"], hedc["cdelt2"], hedc["crpix1"], hedc["crpix2"] = ( + hedc["cdelt1"] / 4.0, + hedc["cdelt2"] / 4.0, + hedc["crpix1"] * 4.0, + hedc["crpix2"] * 4.0, + ) + + dattoarc = heda["cdelt1"] + conver = (s[0] / 2) * dattoarc / hedm["cdelt1"] - (s[1] / 2) + convermul = dattoarc / hedm["cdelt1"] + + # =====Alternative coordinate systems===== + + hdul = fits.open(im171[0]) + hdul[0].header['CUNIT1'] = 'arcsec' + hdul[0].header['CUNIT2'] = 'arcsec' + aia = sunpy.map.Map(hdul[0].data, hdul[0].header) + adj = 4096.0 / aia.dimensions[0].value + x, y = (np.meshgrid(*[np.arange(adj * v.value) for v in aia.dimensions]) * u.pixel) / adj + hpc = aia.pixel_to_world(x, y) + hg = hpc.transform_to(sunpy.coordinates.frames.HeliographicStonyhurst) + + csys = wcs.WCS(hedb) + + # =======setting up arrays to be used============ + + ident = 1 + 
iarr = np.zeros((s[0], s[1]), dtype=np.byte) + offarr, slate = np.array(iarr), np.array(iarr) + bmcool = np.zeros((s[0], s[1]), dtype=np.float32) + cand, bmmix, bmhot = np.array(bmcool), np.array(bmcool), np.array(bmcool) + circ = np.zeros((s[0], s[1]), dtype=int) + + # =======creation of a 2d gaussian for magnetic cut offs=========== + + r = (s[1] / 2.0) - 450 + xgrid, ygrid = np.meshgrid(np.arange(s[0]), np.arange(s[1])) + center = [int(s[1] / 2.0), int(s[1] / 2.0)] + w = np.where((xgrid - center[0]) ** 2 + (ygrid - center[1]) ** 2 > r**2) + y, x = np.mgrid[0:4096, 0:4096] + garr = Gaussian2D(1, s[0] / 2, s[1] / 2, 2000 / 2.3548, 2000 / 2.3548)(x, y) + garr[w] = 1.0 + + # ======creation of array for CH properties========== + + props = np.zeros((26, 30), dtype="", + "", + "", + "BMAX", + "BMIN", + "TOT_B+", + "TOT_B-", + "", + "", + "", + ) + props[:, 1] = ( + "num", + '"', + '"', + "H°", + '"', + '"', + '"', + '"', + '"', + '"', + '"', + '"', + "H°", + "°", + "Mm^2", + "%", + "G", + "G", + "G", + "G", + "G", + "G", + "G", + "Mx", + "Mx", + "Mx", + ) + + # =====removes negative data values===== + + data[np.where(data <= 0)] = 0 + datb[np.where(datb <= 0)] = 0 + datc[np.where(datc <= 0)] = 0 + + # ============make a multi-wavelength image for contours================== + + with np.errstate(divide="ignore"): + t0 = np.log10(datc) + t1 = np.log10(datb) + t2 = np.log10(data) + + t0[np.where(t0 < 0.8)] = 0.8 + t0[np.where(t0 > 2.7)] = 2.7 + t1[np.where(t1 < 1.4)] = 1.4 + t1[np.where(t1 > 3.0)] = 3.0 + t2[np.where(t2 < 1.2)] = 1.2 + t2[np.where(t2 > 3.9)] = 3.9 + + t0 = np.array(((t0 - 0.8) / (2.7 - 0.8)) * 255, dtype=np.float32) + t1 = np.array(((t1 - 1.4) / (3.0 - 1.4)) * 255, dtype=np.float32) + t2 = np.array(((t2 - 1.2) / (3.9 - 1.2)) * 255, dtype=np.float32) + + # ====create 3 segmented bitmasks===== + + with np.errstate(divide="ignore", invalid="ignore"): + bmmix[np.where(t2 / t0 >= ((np.mean(data) * 0.6357) / (np.mean(datc))))] = 1 + bmhot[np.where(t0 + t1 < (0.7 * (np.mean(datb) + np.mean(datc))))] = 1 + bmcool[np.where(t2 / t1 >= ((np.mean(data) * 1.5102) / (np.mean(datb))))] = 1 + + # ====logical conjunction of 3 segmentations======= + + cand = bmcool * bmmix * bmhot + + # ====plot tricolour image with lon/lat conotours======= + + # ======removes off detector mis-identifications========== + + r = (s[1] / 2.0) - 100 + w = np.where((xgrid - center[0]) ** 2 + (ygrid - center[1]) ** 2 <= r**2) + circ[w] = 1.0 + cand = cand * circ + + # =======Seperates on-disk and off-limb CHs=============== + + circ[:] = 0 + r = (rs / dattoarc) - 10 + w = np.where((xgrid - center[0]) ** 2 + (ygrid - center[1]) ** 2 <= r**2) + circ[w] = 1.0 + r = (rs / dattoarc) + 40 + w = np.where((xgrid - center[0]) ** 2 + (ygrid - center[1]) ** 2 >= r**2) + circ[w] = 1.0 + cand = cand * circ + + # ====open file for property storage===== + + # =====contours the identified datapoints======= + + cand = np.array(cand, dtype=np.uint8) + cont, heir = cv2.findContours(cand, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) + + # ======sorts contours by size============ + + sizes = [] + for i in range(len(cont)): + sizes = np.append(sizes, len(cont[i])) + + reord = sizes.ravel().argsort()[::-1] + + tmp = list(cont) + + for i in range(len(cont)): + tmp[i] = cont[reord[i]] + + cont = list(tmp) + + # =====cycles through contours========= + + for i in range(len(cont)): + x = np.append(x, len(cont[i])) + + # =====only takes values of minimum surface length and calculates area====== + + if len(cont[i]) <= 100: + continue + area = 0.5 * 
np.abs( + np.dot(cont[i][:, 0, 0], np.roll(cont[i][:, 0, 1], 1)) + - np.dot(cont[i][:, 0, 1], np.roll(cont[i][:, 0, 0], 1)) + ) + arcar = area * (dattoarc**2) + if arcar > 1000: + # =====finds centroid======= + + chpts = len(cont[i]) + cent = [np.mean(cont[i][:, 0, 0]), np.mean(cont[i][:, 0, 1])] + + # ===remove quiet sun regions encompassed by coronal holes====== + + if ( + cand[ + np.max(cont[i][:, 0, 0]) + 1, + cont[i][np.where(cont[i][:, 0, 0] == np.max(cont[i][:, 0, 0]))[0][0], 0, 1], + ] + > 0 + ) and ( + iarr[ + np.max(cont[i][:, 0, 0]) + 1, + cont[i][np.where(cont[i][:, 0, 0] == np.max(cont[i][:, 0, 0]))[0][0], 0, 1], + ] + > 0 + ): + mahotas.polygon.fill_polygon(np.array(list(zip(cont[i][:, 0, 1], cont[i][:, 0, 0]))), slate) + iarr[np.where(slate == 1)] = 0 + slate[:] = 0 + + else: + # ====create a simple centre point====== + + arccent = csys.all_pix2world(cent[0], cent[1], 0) + + # ====classifies off limb CH regions======== + + if (((arccent[0] ** 2) + (arccent[1] ** 2)) > (rs**2)) or ( + np.sum(np.array(csys.all_pix2world(cont[i][0, 0, 0], cont[i][0, 0, 1], 0)) ** 2) > (rs**2) + ): + mahotas.polygon.fill_polygon(np.array(list(zip(cont[i][:, 0, 1], cont[i][:, 0, 0]))), offarr) + else: + # =====classifies on disk coronal holes======= + + mahotas.polygon.fill_polygon(np.array(list(zip(cont[i][:, 0, 1], cont[i][:, 0, 0]))), slate) + poslin = np.where(slate == 1) + slate[:] = 0 + + # ====create an array for magnetic polarity======== + + pos = np.zeros((len(poslin[0]), 2), dtype=np.uint) + pos[:, 0] = np.array((poslin[0] - (s[0] / 2)) * convermul + (s[1] / 2), dtype=np.uint) + pos[:, 1] = np.array((poslin[1] - (s[0] / 2)) * convermul + (s[1] / 2), dtype=np.uint) + npix = list( + np.histogram( + datm[pos[:, 0], pos[:, 1]], + bins=np.arange( + np.round(np.min(datm[pos[:, 0], pos[:, 1]])) - 0.5, + np.round(np.max(datm[pos[:, 0], pos[:, 1]])) + 0.6, + 1, + ), + ) + ) + npix[0][np.where(npix[0] == 0)] = 1 + npix[1] = npix[1][:-1] + 0.5 + + wh1 = np.where(npix[1] > 0) + wh2 = np.where(npix[1] < 0) + + # =====magnetic cut offs dependant on area========= + + if ( + np.absolute((np.sum(npix[0][wh1]) - np.sum(npix[0][wh2])) / np.sqrt(np.sum(npix[0]))) + <= 10 + and arcar < 9000 + ): + continue + if ( + np.absolute(np.mean(datm[pos[:, 0], pos[:, 1]])) < garr[int(cent[0]), int(cent[1])] + and arcar < 40000 + ): + continue + iarr[poslin] = ident + + # ====create an accurate center point======= + + ypos = np.sum((poslin[0]) * np.absolute(hg.lat[poslin])) / np.sum(np.absolute(hg.lat[poslin])) + xpos = np.sum((poslin[1]) * np.absolute(hg.lon[poslin])) / np.sum(np.absolute(hg.lon[poslin])) + + arccent = csys.all_pix2world(xpos, ypos, 0) + + # ======calculate average angle coronal hole is subjected to====== + + dist = np.sqrt((arccent[0] ** 2) + (arccent[1] ** 2)) + ang = np.arcsin(dist / rs) + + # =====calculate area of CH with minimal projection effects====== + + trupixar = abs(area / np.cos(ang)) + truarcar = trupixar * (dattoarc**2) + trummar = truarcar * ((6.96e08 / rs) ** 2) + + # ====find CH extent in lattitude and longitude======== + + maxxlat = hg.lat[ + cont[i][np.where(cont[i][:, 0, 0] == np.max(cont[i][:, 0, 0]))[0][0], 0, 1], + np.max(cont[i][:, 0, 0]), + ] + maxxlon = hg.lon[ + cont[i][np.where(cont[i][:, 0, 0] == np.max(cont[i][:, 0, 0]))[0][0], 0, 1], + np.max(cont[i][:, 0, 0]), + ] + maxylat = hg.lat[ + np.max(cont[i][:, 0, 1]), + cont[i][np.where(cont[i][:, 0, 1] == np.max(cont[i][:, 0, 1]))[0][0], 0, 0], + ] + maxylon = hg.lon[ + np.max(cont[i][:, 0, 1]), + 
cont[i][np.where(cont[i][:, 0, 1] == np.max(cont[i][:, 0, 1]))[0][0], 0, 0], + ] + minxlat = hg.lat[ + cont[i][np.where(cont[i][:, 0, 0] == np.min(cont[i][:, 0, 0]))[0][0], 0, 1], + np.min(cont[i][:, 0, 0]), + ] + minxlon = hg.lon[ + cont[i][np.where(cont[i][:, 0, 0] == np.min(cont[i][:, 0, 0]))[0][0], 0, 1], + np.min(cont[i][:, 0, 0]), + ] + minylat = hg.lat[ + np.min(cont[i][:, 0, 1]), + cont[i][np.where(cont[i][:, 0, 1] == np.min(cont[i][:, 0, 1]))[0][0], 0, 0], + ] + minylon = hg.lon[ + np.min(cont[i][:, 0, 1]), + cont[i][np.where(cont[i][:, 0, 1] == np.min(cont[i][:, 0, 1]))[0][0], 0, 0], + ] + + # =====CH centroid in lat/lon======= + + centlat = hg.lat[int(ypos), int(xpos)] + centlon = hg.lon[int(ypos), int(xpos)] + + # ====caluclate the mean magnetic field===== + + mB = np.mean(datm[pos[:, 0], pos[:, 1]]) + mBpos = np.sum(npix[0][wh1] * npix[1][wh1]) / np.sum(npix[0][wh1]) + mBneg = np.sum(npix[0][wh2] * npix[1][wh2]) / np.sum(npix[0][wh2]) + + # =====finds coordinates of CH boundaries======= + + Ywb, Xwb = csys.all_pix2world( + cont[i][np.where(cont[i][:, 0, 0] == np.max(cont[i][:, 0, 0]))[0][0], 0, 1], + np.max(cont[i][:, 0, 0]), + 0, + ) + Yeb, Xeb = csys.all_pix2world( + cont[i][np.where(cont[i][:, 0, 0] == np.min(cont[i][:, 0, 0]))[0][0], 0, 1], + np.min(cont[i][:, 0, 0]), + 0, + ) + Ynb, Xnb = csys.all_pix2world( + np.max(cont[i][:, 0, 1]), + cont[i][np.where(cont[i][:, 0, 1] == np.max(cont[i][:, 0, 1]))[0][0], 0, 0], + 0, + ) + Ysb, Xsb = csys.all_pix2world( + np.min(cont[i][:, 0, 1]), + cont[i][np.where(cont[i][:, 0, 1] == np.min(cont[i][:, 0, 1]))[0][0], 0, 0], + 0, + ) + + width = round(maxxlon.value) - round(minxlon.value) + + if minxlon.value >= 0.0: + eastl = "W" + str(int(np.round(minxlon.value))) + else: + eastl = "E" + str(np.absolute(int(np.round(minxlon.value)))) + if maxxlon.value >= 0.0: + westl = "W" + str(int(np.round(maxxlon.value))) + else: + westl = "E" + str(np.absolute(int(np.round(maxxlon.value)))) + + if centlat >= 0.0: + centlat = "N" + str(int(np.round(centlat.value))) + else: + centlat = "S" + str(np.absolute(int(np.round(centlat.value)))) + if centlon >= 0.0: + centlon = "W" + str(int(np.round(centlon.value))) + else: + centlon = "E" + str(np.absolute(int(np.round(centlon.value)))) + + # ====insertions of CH properties into property array===== + + props[0, ident + 1] = str(ident) + props[1, ident + 1] = str(np.round(arccent[0])) + props[2, ident + 1] = str(np.round(arccent[1])) + props[3, ident + 1] = str(centlon + centlat) + props[4, ident + 1] = str(np.round(Xeb)) + props[5, ident + 1] = str(np.round(Yeb)) + props[6, ident + 1] = str(np.round(Xwb)) + props[7, ident + 1] = str(np.round(Ywb)) + props[8, ident + 1] = str(np.round(Xnb)) + props[9, ident + 1] = str(np.round(Ynb)) + props[10, ident + 1] = str(np.round(Xsb)) + props[11, ident + 1] = str(np.round(Ysb)) + props[12, ident + 1] = str(eastl + "-" + westl) + props[13, ident + 1] = str(width) + props[14, ident + 1] = f"{trummar / 1e+12:.1e}" + props[15, ident + 1] = str(np.round((arcar * 100 / (np.pi * (rs**2))), 1)) + props[16, ident + 1] = str(np.round(mB, 1)) + props[17, ident + 1] = str(np.round(mBpos, 1)) + props[18, ident + 1] = str(np.round(mBneg, 1)) + props[19, ident + 1] = str(np.round(np.max(npix[1]), 1)) + props[20, ident + 1] = str(np.round(np.min(npix[1]), 1)) + tbpos = np.sum(datm[pos[:, 0], pos[:, 1]][np.where(datm[pos[:, 0], pos[:, 1]] > 0)]) + props[21, ident + 1] = f"{tbpos:.1e}" + tbneg = np.sum(datm[pos[:, 0], pos[:, 1]][np.where(datm[pos[:, 0], pos[:, 1]] < 0)]) + props[22, 
ident + 1] = f"{tbneg:.1e}" + props[23, ident + 1] = f"{mB * trummar * 1e+16:.1e}" + props[24, ident + 1] = f"{mBpos * trummar * 1e+16:.1e}" + props[25, ident + 1] = f"{mBneg * trummar * 1e+16:.1e}" + + # =====sets up code for next possible coronal hole===== + + ident = ident + 1 + + # =====sets ident back to max value of iarr====== + + ident = ident - 1 + np.savetxt("ch_summary.txt", props, fmt="%s") + + # ====create image in output folder======= + # from scipy.misc import bytescale + + + def rescale01(arr, cmin=None, cmax=None, a=0, b=1): + if cmin or cmax: + arr = np.clip(arr, cmin, cmax) + return (b - a) * ((arr - np.min(arr)) / (np.max(arr) - np.min(arr))) + a + + + def plot_tricolor(): + tricolorarray = np.zeros((4096, 4096, 3)) + + data_a = img_as_ubyte(rescale01(np.log10(data), cmin=1.2, cmax=3.9)) + data_b = img_as_ubyte(rescale01(np.log10(datb), cmin=1.4, cmax=3.0)) + data_c = img_as_ubyte(rescale01(np.log10(datc), cmin=0.8, cmax=2.7)) + + tricolorarray[..., 0] = data_c / np.max(data_c) + tricolorarray[..., 1] = data_b / np.max(data_b) + tricolorarray[..., 2] = data_a / np.max(data_a) + + fig, ax = plt.subplots(figsize=(10, 10)) + + plt.imshow(tricolorarray, origin="lower") # , extent = ) + plt.contour(xgrid, ygrid, slate, colors="white", linewidths=0.5) + plt.savefig("tricolor.png") + plt.close() + + + def plot_mask(slate=slate): + chs = np.where(iarr > 0) + slate[chs] = 1 + slate = np.array(slate, dtype=np.uint8) + cont, heir = cv2.findContours(slate, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) + + circ[:] = 0 + r = rs / dattoarc + w = np.where((xgrid - center[0]) ** 2 + (ygrid - center[1]) ** 2 <= r**2) + circ[w] = 1.0 + + plt.figure(figsize=(10, 10)) + plt.xlim(143, 4014) + plt.ylim(143, 4014) + plt.scatter(chs[1], chs[0], marker="s", s=0.0205, c="black", cmap="viridis", edgecolor="none", alpha=0.2) + plt.gca().set_aspect("equal", adjustable="box") + plt.axis("off") + plt.contour(xgrid, ygrid, slate, colors="black", linewidths=0.5) + plt.contour(xgrid, ygrid, circ, colors="black", linewidths=1.0) + + plt.savefig("CH_mask_" + hedb["DATE"] + ".png", transparent=True) + plt.close() + + + # ====stores all CH properties in a text file===== + + plot_tricolor() + plot_mask() + + # ====EOF==== diff --git a/chimerapy/data/README.rst b/chimerapy/data/README.rst new file mode 100644 index 0000000..382f6e7 --- /dev/null +++ b/chimerapy/data/README.rst @@ -0,0 +1,6 @@ +Data directory +============== + +This directory contains data files included with the package source +code distribution. Note that this is intended only for relatively small files +- large files should be externally hosted and downloaded as needed. diff --git a/chimerapy/tests/__init__.py b/chimerapy/tests/__init__.py new file mode 100644 index 0000000..838b457 --- /dev/null +++ b/chimerapy/tests/__init__.py @@ -0,0 +1,4 @@ +# Licensed under a 3-clause BSD style license - see LICENSE.rst +""" +This module contains package tests. 
+""" diff --git a/chimerapy/tests/test_chimera.py b/chimerapy/tests/test_chimera.py new file mode 100644 index 0000000..ef21a7e --- /dev/null +++ b/chimerapy/tests/test_chimera.py @@ -0,0 +1,17 @@ +import os + +from chimerapy.chimera import chimera_legacy +from parfive import Downloader + +INPUT_FILES = { + 'aia171': 'https://solarmonitor.org/data/2016/09/22/fits/saia/saia_00171_fd_20160922_103010.fts.gz', + 'aia193': 'https://solarmonitor.org/data/2016/09/22/fits/saia/saia_00193_fd_20160922_103041.fts.gz', + 'aia211': 'https://solarmonitor.org/data/2016/09/22/fits/saia/saia_00211_fd_20160922_103046.fts.gz', + 'hmi_mag': 'https://solarmonitor.org/data/2016/09/22/fits/shmi/shmi_maglc_fd_20160922_094640.fts.gz' +} + + +def test_chimera(tmp_path): + files = Downloader.simple_download(INPUT_FILES.values(), path=tmp_path) + os.chdir(tmp_path) + chimera_legacy() diff --git a/chimerapy/version.py b/chimerapy/version.py new file mode 100644 index 0000000..90d4111 --- /dev/null +++ b/chimerapy/version.py @@ -0,0 +1,17 @@ +# NOTE: First try _dev.scm_version if it exists and setuptools_scm is installed +# This file is not included in wheels/tarballs, so otherwise it will +# fall back on the generated _version module. +try: + try: + from ._dev.scm_version import version + except ImportError: + from ._version import version +except Exception: + import warnings + + warnings.warn( + f'could not determine {__name__.split(".")[0]} package version; this indicates a broken installation' + ) + del warnings + + version = "0.0.0" diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..d4bb2cb --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..108ed37 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,77 @@ +# +# Configuration file for the Sphinx documentation builder. +# +# This file does only contain a selection of the most common options. For a +# full list see the documentation: +# http://www.sphinx-doc.org/en/master/config + + +# -- Project information ----------------------------------------------------- + +project = "CHIMERApy" +copyright = "2022, DIAS Solar" +author = "DIAS Solar" + +# The full version, including alpha/beta/rc tags +from chimerapy import __version__ + +release = __version__ + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named "sphinx.ext.*") or your custom +# ones. 
+extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.intersphinx", + "sphinx.ext.todo", + "sphinx.ext.coverage", + "sphinx.ext.inheritance_diagram", + "sphinx.ext.viewcode", + "sphinx.ext.napoleon", + "sphinx.ext.doctest", + "sphinx.ext.mathjax", + "sphinx_automodapi.automodapi", + "sphinx_automodapi.smart_resolver", +] + +# Add any paths that contain templates here, relative to this directory. +# templates_path = ["_templates"] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = ".rst" + +# The master toctree document. +master_doc = "index" + +# -- Options for intersphinx extension --------------------------------------- + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = {"python": ("https://docs.python.org/", None)} + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +# html_static_path = ["_static"] + +# By default, when rendering docstrings for classes, sphinx.ext.autodoc will +# make docs with the class-level docstring and the class-method docstrings, +# but not the __init__ docstring, which often contains the parameters to +# class constructors across the scientific Python ecosystem. The option below +# will append the __init__ docstring to the class-level docstring when rendering +# the docs. For more options, see: +# https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#confval-autoclass_content +autoclass_content = "both" diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..547bf57 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,15 @@ +CHIMERApy Documentation +----------------------- + +This is the documentation for CHIMERApy. + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000..2119f51 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. 
+ echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/licenses/LICENSE.rst b/licenses/LICENSE.rst new file mode 100644 index 0000000..c0bae65 --- /dev/null +++ b/licenses/LICENSE.rst @@ -0,0 +1,25 @@ +Copyright (c) 2024, DIAS Solar +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or + other materials provided with the distribution. +* Neither the name of the Astropy Team nor the names of its contributors may be + used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/licenses/README.rst b/licenses/README.rst new file mode 100644 index 0000000..67b82f6 --- /dev/null +++ b/licenses/README.rst @@ -0,0 +1,9 @@ +Licenses +======== + +This directory holds license and credit information for the package, +works the package is derived from, and/or datasets. + +Ensure that you pick a package licence which is in this folder and it matches +the one mentioned in the top level README.rst file. If you are using the +pre-rendered version of this template check for the word 'Other' in the README. diff --git a/licenses/TEMPLATE_LICENSE.rst b/licenses/TEMPLATE_LICENSE.rst new file mode 100644 index 0000000..544a2db --- /dev/null +++ b/licenses/TEMPLATE_LICENSE.rst @@ -0,0 +1,31 @@ +This project is based upon the OpenAstronomy package template +(https://github.com/OpenAstronomy/package-template/) which is licensed under the terms +of the following licence. + +--- + +Copyright (c) 2018, OpenAstronomy Developers +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +* Redistributions in binary form must reproduce the above copyright notice, this + list of conditions and the following disclaimer in the documentation and/or + other materials provided with the distribution. 
+* Neither the name of the Astropy Team nor the names of its contributors may be + used to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..92c08c4 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,90 @@ +[build-system] +requires = [ + "setuptools>=62.1", + "setuptools_scm[toml]>=6.2", + "wheel",] +build-backend = "setuptools.build_meta" + +[project] +name = "chimerapy" +description = "CHIMERApy is a python implmentation of the CHIMERA coronal hole detection algorithm." +readme = "README.rst" +requires-python = ">=3.9" +license = { file = "licenses/LICENSE.rst", content-type = "text/plain" } +dependencies = [ + 'sunpy[net,map]', + 'mahotas', + 'scikit-image', + 'opencv-python' + +] +dynamic = ["version"] + +[project.optional-dependencies] +tests = [ + "pytest", + "pytest-doctestplus", + "pytest-cov", +] +docs = [ + "sphinx", + "sphinx-automodapi", + "tomli; python_version <\"3.11\"", +] + +[project.urls] +repository = "chimerapy.readthedocs.io" + +[tool.setuptools] +zip-safe = false +include-package-data = true + +[tool.setuptools.packages.find] + +[tool.setuptools_scm] +write_to = "chimerapy/_version.py" + +[tool.pytest.ini_options] +testpaths = [ + "chimerapy", + "docs", +] +norecursedirs = ['chimerapy/_dev'] +doctest_plus = "enabled" +text_file_format = "rst" +addopts = "--doctest-rst" + +[tool.coverage.run] +omit = [ + "chimerapy/__init*", + "chimerapy/conftest.py", + "chimerapy/*setup_package*", + "chimerapy/tests/*", + "chimerapy/*/tests/*", + "chimerapy/extern/*", + "chimerapy/version*", + "*/chimerapy/__init*", + "*/chimerapy/conftest.py", + "*/chimerapy/*setup_package*", + "*/chimerapy/tests/*", + "*/chimerapy/*/tests/*", + "*/chimerapy/extern/*", + "*/chimerapy/version*", +] + +[tool.coverage.report] +exclude_lines = [ + # Have to re-enable the standard pragma + "pragma: no cover", + # Don't complain about packages we have installed + "except ImportError", + # Don't complain if tests don't hit assertions + "raise AssertionError", + "raise NotImplementedError", + # Don't complain about script hooks + "def main(.*):", + # Ignore branches that don't pertain to this version of Python + "pragma: py{ignore_python_version}", + # Don't complain about IPython completion helper + "def _ipython_key_completions_", +] diff --git a/setup.py b/setup.py new file mode 100755 index 0000000..c823345 --- /dev/null +++ b/setup.py @@ -0,0 +1,4 @@ +#!/usr/bin/env python +from setuptools import setup + +setup() diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..c226b1f --- /dev/null +++ b/tox.ini @@ -0,0 +1,76 @@ +[tox] +min_version = 4.0 +requires = + tox-pypi-filter>=0.14 +envlist = + py{39,310} + 
build_docs + +[testenv] +pypi_filter = https://raw.githubusercontent.com/sunpy/sunpy/main/.test_package_pins.txt +# Run the tests in a temporary directory to make sure that we don't import +# the package from the source tree +change_dir = .tmp/{envname} +description = + run tests + oldestdeps: with the oldest supported version of key dependencies + devdeps: with the latest developer version of key dependencies + +pass_env = + # Custom compiler locations (such as ccache) + CC + # Location of locales (needed by sphinx on some systems) + LOCALE_ARCHIVE + # If the user has set a LC override we should follow it + LC_ALL + +set_env = + MPLBACKEND=agg + devdeps: PIP_EXTRA_INDEX_URL = https://pypi.anaconda.org/astropy/simple https://pypi.anaconda.org/scientific-python-nightly-wheels/simple + # Define the base test command here to allow us to add more flags for each tox factor + PYTEST_COMMAND = pytest -vvv -r fEs --pyargs chimerapy --cov-report=xml --cov=chimerapy --cov-config={toxinidir}/pyproject.toml {toxinidir}/docs + +deps = + # For packages which publish nightly wheels this will pull the latest nightly + devdeps: numpy>=0.0.dev0 + # Packages without nightly wheels will be built from source like this + # devdeps: git+https://github.com/ndcube/ndcube + oldestdeps: astropy<5.3.0 + pytest-cov + +# The following indicates which extras_require will be installed +extras = + tests + +commands_pre = + oldestdeps: minimum_dependencies chimerapy --filename requirements-min.txt + oldestdeps: pip install -r requirements-min.txt + pip freeze --all --no-input + +commands = + # To run different commands for different factors exclude the factor from the default command like this + # !online: {env:PYTEST_COMMAND} {posargs} + # Then specify a specific one like this + # online: {env:PYTEST_COMMAND} --remote-data=any {posargs} + # If you have no factors which require different commands this is all you need: + {env:PYTEST_COMMAND} {posargs} + +# Uncomment this once we add pre-commit +#[testenv:codestyle] +#pypi_filter = +#skip_install = true +#description = Run all style and file checks with pre-commit +#deps = +# pre-commit +#commands = +# pre-commit install-hooks +# pre-commit run --color always --all-files --show-diff-on-failure + +[testenv:build_docs] +description = invoke sphinx-build to build the HTML docs +change_dir = + docs +extras = + docs +commands = + sphinx-build -j auto --color -W --keep-going -b html -d _build/.doctrees . _build/html {posargs}
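
Note on usage: below is a minimal sketch of how the legacy entry point added in chimerapy/chimera.py might be exercised locally, mirroring chimerapy/tests/test_chimera.py. It assumes parfive is installed and the SolarMonitor URLs used by the test are still reachable; the scratch directory name is a hypothetical choice, while the output file names are the ones chimera_legacy() itself writes.

    import os
    from pathlib import Path

    from parfive import Downloader

    from chimerapy.chimera import chimera_legacy

    # Same 2016-09-22 AIA 171/193/211 and HMI magnetogram files used by the test suite.
    INPUT_FILES = [
        "https://solarmonitor.org/data/2016/09/22/fits/saia/saia_00171_fd_20160922_103010.fts.gz",
        "https://solarmonitor.org/data/2016/09/22/fits/saia/saia_00193_fd_20160922_103041.fts.gz",
        "https://solarmonitor.org/data/2016/09/22/fits/saia/saia_00211_fd_20160922_103046.fts.gz",
        "https://solarmonitor.org/data/2016/09/22/fits/shmi/shmi_maglc_fd_20160922_094640.fts.gz",
    ]

    work_dir = Path("chimera_run")  # hypothetical scratch directory
    work_dir.mkdir(exist_ok=True)

    # chimera_legacy() globs for *171*.fts.gz, *193*.fts.gz, *211*.fts.gz and *hmi*.fts.gz
    # in the current working directory, so download the inputs there and chdir first.
    Downloader.simple_download(INPUT_FILES, path=work_dir)
    os.chdir(work_dir)

    chimera_legacy()
    # Outputs written to the working directory:
    #   ch_summary.txt, tricolor.png, CH_mask_<DATE>.png

This is essentially what the CI test does via pytest's tmp_path fixture; running it outside pytest just makes the downloaded inputs and generated coronal-hole mask/summary files easier to inspect by hand.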