Merge pull request #14 from thomasthaddeus/dev
v1.2.0
thomasthaddeus authored May 6, 2024
2 parents 2e13937 + e9c2559 commit 7fc3e04
Showing 8 changed files with 851 additions and 35 deletions.
81 changes: 77 additions & 4 deletions .github/workflows/python-publish.yml
@@ -1,4 +1,4 @@
-name: Upload Python Package
+name: Publish Python 🐍 distribution 📦 to PyPI
 
 on:
   release:
@@ -8,7 +8,8 @@ permissions:
   contents: read
 
 jobs:
-  deploy:
+  build:
+    name: Build distribution 📦
     runs-on: ubuntu-latest
 
     steps:
@@ -27,8 +28,80 @@ jobs:
     - name: Build package
       run: python -m build
 
-    - name: Publish package
-      uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29
+    - name: Store the distribution packages
+      uses: actions/upload-artifact@v3
+      with:
+        name: python-package-distributions
+        path: dist/
+
+  publish-to-pypi:
+    name: >-
+      Publish Python 🐍 distribution 📦 to PyPI
+    if: startsWith(github.ref, 'refs/tags/')  # only publish to PyPI on tag pushes
+    needs:
+    - build
+    runs-on: ubuntu-latest
+    environment:
+      name: pypi
+      url: https://pypi.org/p/dataanalysistoolkit  # Replace <package-name> with your PyPI project name
+    permissions:
+      id-token: write  # IMPORTANT: mandatory for trusted publishing
+
+    steps:
+    - name: Download all the dists
+      uses: actions/download-artifact@v3
+      with:
+        name: python-package-distributions
+        path: dist/
+
+    - name: Publish distribution 📦 to PyPI
+      uses: pypa/gh-action-pypi-publish@release/v1
       with:
         user: __token__
         password: ${{ secrets.PYPI_API_TOKEN }}
+
+  github-release:
+    name: >-
+      Sign the Python 🐍 distribution 📦 with Sigstore
+      and upload them to GitHub Release
+    needs:
+    - publish-to-pypi
+    runs-on: ubuntu-latest
+
+    permissions:
+      contents: write  # IMPORTANT: mandatory for making GitHub Releases
+      id-token: write  # IMPORTANT: mandatory for sigstore
+
+    steps:
+    - name: Download all the dists
+      uses: actions/download-artifact@v4
+      with:
+        name: python-package-distributions
+        path: dist/
+
+    - name: Sign the dists with Sigstore
+      uses: sigstore/gh-action-sigstore-python@v2.1.1
+      with:
+        inputs: >-
+          ./dist/*.tar.gz
+          ./dist/*.whl
+    - name: Create GitHub Release
+      env:
+        GITHUB_TOKEN: ${{ github.token }}
+      run: >-
+        gh release create
+        '${{ github.ref_name }}'
+        --repo '${{ github.repository }}'
+        --notes ""
+    - name: Upload artifact signatures to GitHub Release
+      env:
+        GITHUB_TOKEN: ${{ github.token }}
+      # Upload to GitHub Release using the `gh` CLI.
+      # `dist/` contains the built packages, and the
+      # sigstore-produced signatures and certificates.
+      run: >-
+        gh release upload
+        '${{ github.ref_name }}' dist/**
+        --repo '${{ github.repository }}'
@@ -47,14 +47,10 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "metadata": {
-    "vscode": {
-     "languageId": "shellscript"
-    }
-   },
+   "metadata": {},
    "outputs": [],
    "source": [
-    "pip install dataanalysistoolkit"
+    "%pip install dataanalysistoolkit\n"
    ]
   },
   {
@@ -66,14 +62,26 @@
   },
   {
    "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [
+    {
+     "ename": "ModuleNotFoundError",
+     "evalue": "No module named 'src'",
+     "output_type": "error",
+     "traceback": [
+      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
+      "\u001b[1;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)",
+      "Cell \u001b[1;32mIn[3], line 2\u001b[0m\n\u001b[0;32m 1\u001b[0m \u001b[38;5;66;03m# Importing necessary libraries from the toolkit\u001b[39;00m\n\u001b[1;32m----> 2\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01msrc\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mdata_sources\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mexcel_connector\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m ExcelConnector\n\u001b[0;32m 3\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01msrc\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mdata_sources\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01msql_connector\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m SQLConnector\n\u001b[0;32m 4\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01msrc\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mdata_sources\u001b[39;00m\u001b[38;5;21;01m.\u001b[39;00m\u001b[38;5;21;01mapi_connector\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m APIConnector\n",
+      "\u001b[1;31mModuleNotFoundError\u001b[0m: No module named 'src'"
+     ]
+    }
+   ],
    "source": [
     "# Importing necessary libraries from the toolkit\n",
-    "from src.data_sources.excel_connector import ExcelConnector\n",
-    "from src.data_sources.sql_connector import SQLConnector\n",
-    "from src.data_sources.api_connector import APIConnector\n",
+    "from src.data_sources import ExcelConnector\n",
+    "from src.data_sources import SQLConnector\n",
+    "from src.data_sources import APIConnector\n",
     "from src.integrators.data_integrator import DataIntegrator\n",
     "from src.formatters.data_formatter import DataFormatter\n",
     "\n",
@@ -138,7 +146,7 @@
     "df_sheet_index = excel_connector.load_data(sheet_name=1)\n",
     "\n",
     "# Display the first few rows of the dataframe\n",
-    "print(df_sheet_name.head())"
+    "print(df_sheet_name.head())\n"
    ]
   },
   {
@@ -155,7 +163,7 @@
    "outputs": [],
    "source": [
     "excel_connector = ExcelConnector('path/to/excel.xlsx')\n",
-    "df_excel = excel_connector.load_data(sheet_name='Sheet1')"
+    "df_excel = excel_connector.load_data(sheet_name='Sheet1')\n"
    ]
   },
   {
@@ -188,7 +196,7 @@
     "df_sql = sql_connector.query_data(query)\n",
     "\n",
     "# Display the result\n",
-    "print(df_sql.head())"
+    "print(df_sql.head())\n"
    ]
   },
   {
@@ -369,7 +377,7 @@
    "outputs": [],
    "source": [
     "# Custom method for time-series data integration (hypothetical example)\n",
-    "time_aligned_df = integrator.integrate_time_series('timestamp_column')"
+    "time_aligned_df = integrator.integrate_time_series('timestamp_column')\n"
    ]
   },
   {
@@ -484,7 +492,7 @@
     "formatter.fill_missing_values('another_column', method='ffill')\n",
     "\n",
     "# Checking for missing values\n",
-    "print(combined_df.isnull().sum())"
+    "print(combined_df.isnull().sum())\n"
    ]
   },
   {
@@ -508,7 +516,7 @@
     "formatter.custom_transform('numeric_column', lambda x: x ** 2)\n",
     "\n",
     "# Viewing the transformed data\n",
-    "print(combined_df['numeric_column'].head())"
+    "print(combined_df['numeric_column'].head())\n"
    ]
   },
   {
@@ -621,8 +629,22 @@
   }
  ],
  "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3",
+   "language": "python",
+   "name": "python3"
+  },
   "language_info": {
-   "name": "python"
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.11.9"
   }
  },
 "nbformat": 4,
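The notebook hunk above records a ModuleNotFoundError for `src.data_sources`, which only resolves when Python can see the repository's top-level `src` package. A minimal sketch of one way to run those cells from a subdirectory of the checkout (the directory layout and `repo_root` path are assumptions for illustration, not part of this commit):

# Sketch: make the repository's top-level `src` package importable from a
# notebook running in a subdirectory. Assumes the layout implied by the diff,
# i.e. <repo_root>/src/data_sources/...; adjust repo_root to your checkout.
import sys
from pathlib import Path

repo_root = Path.cwd().parent  # assumption: the notebook sits one level below the repo root
if str(repo_root) not in sys.path:
    sys.path.insert(0, str(repo_root))

# Import path used by the updated notebook cells
from src.data_sources import ExcelConnector, SQLConnector, APIConnector

This only matters when running the notebook against the source tree rather than an installed distribution.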
2 changes: 1 addition & 1 deletion docs/source/conf.py
@@ -13,7 +13,7 @@
 project = 'Data Analysis Toolkit'
 copyright = '2024, Thaddeus Thomas'
 author = 'Thaddeus Thomas'
-release = '1.1.1'
+release = '1.2.0'
 
 # -- General configuration ---------------------------------------------------
 # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
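The release string above is bumped by hand with each version. A possible alternative, sketched here under the assumption that the dataanalysistoolkit distribution (its PyPI name appears in the workflow diff) is installed in the docs build environment, is to read the version from package metadata:

# Sketch: derive Sphinx's `release` value from installed package metadata
# instead of hard-coding '1.2.0'. Assumes the docs build environment has the
# dataanalysistoolkit distribution installed; otherwise keep the literal string.
from importlib.metadata import PackageNotFoundError, version

try:
    release = version("dataanalysistoolkit")
except PackageNotFoundError:
    release = "1.2.0"  # fallback matching the value set in this commit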