Revert "gQuant34 - Update build.sh to make use of RAPIDS v0.8 container"
yidong72 authored Aug 13, 2019
1 parent 9c9cbe1 commit ec8406b
Showing 5 changed files with 151 additions and 156 deletions.
85 changes: 41 additions & 44 deletions build.sh
@@ -1,68 +1,65 @@
#!/bin/bash

echo "Building gQuant container..."

read -p "Please, press '1' for cuda 9.2, or '2' for cuda 10.0. [1]/2: " CUDA_VERSION
CUDA_VERSION=${CUDA_VERSION:-1}

if [ "$CUDA_VERSION" -eq 2 ]; then
echo "cuda 10.0 selected."
CONTAINER='nvcr.io/nvidia/rapidsai/rapidsai:cuda10.0-runtime-ubuntu16.04'
CUPY='cupy-cuda100'
else
echo "cuda 9.2 selected."
CONTAINER='nvcr.io/nvidia/rapidsai/rapidsai:cuda9.2-runtime-ubuntu16.04'
CUPY='cupy-cuda92'
fi

read -p "Would you like to install Vim JupyterLab Extension (optional) [N]/y: " VIM_INSTALL
VIM_INSTALL=${VIM_INSTALL:-N}

if [ "$VIM_INSTALL" = "Y" ] || [ "$VIM_INSTALL" = "y" ]; then
echo "Vim JupyterLab Extension will be installed."
else
echo "Vim JupyterLab Extension will not be installed."
fi

D_FILE=${D_FILE:='Dockerfile.Rapids'}
D_CONT=${D_CONT:='gquant/gquant:latest'}

echo "Fetching latest version of gQuant project"
git clone --recursive https://github.com/rapidsai/gQuant


cat > $D_FILE <<EOF
FROM $CONTAINER
USER root
FROM nvcr.io/nvidia/rapidsai/rapidsai:0.7-cuda10.0-devel-ubuntu18.04-gcc7-py3.6
ADD gQuant /rapids/gQuant
USER root
RUN apt-get update && apt-get install -y libfontconfig1 libxrender1
RUN apt-get update
RUN apt-get install -y libfontconfig1 libxrender1
SHELL ["bash","-c"]
#
# Additional python libs
#
RUN pip install nxpd graphviz pudb dask_labextension sphinx sphinx_rtd_theme recommonmark numpydoc $CUPY
RUN conda install -y -c conda-forge python-graphviz bqplot=0.11.5 nodejs=11.11.0 jupyterlab=0.35.4 \
ipywidgets=7.4.2 pytables mkl numexpr
RUN source activate rapids \
&& pip install cython matplotlib networkx nxpd graphviz pudb
RUN cd /rapids && source activate rapids \
&& conda install -c conda-forge bqplot nodejs \
&& conda install -y python-graphviz\
&& conda install -y tqdm \
&& conda install -y pytables \
&& conda install -y -f mkl \
&& conda install -y numpy scipy scikit-learn numexpr
## && conda install -c nvidia -c rapidsai -c numba -c conda-forge -c defaults cudf=0.6 python=3.6 cudatoolkit=10.0
## && conda install -c rapidsai cudf \
## && conda install -c rapidsai cuml \
## && git clone https://github.com/rapidsai/cuml.git
#
# required set up
#
RUN jupyter labextension install @jupyter-widgets/[email protected] \
&& jupyter labextension install [email protected] \
&& mkdir /.local /.jupyter /.config /.cupy \
&& chmod 777 /.local /.jupyter /.config /.cupy
RUN if [ "$VIM_INSTALL" = "Y" ] || [ "$VIM_INSTALL" = "y" ]; then /conda/envs/rapids/bin/jupyter labextension install [email protected] ; fi
RUN source activate rapids \
&& /conda/envs/rapids/bin/jupyter labextension install @jupyter-widgets/jupyterlab-manager \
&& /conda/envs/rapids/bin/jupyter labextension install bqplot \
&& mkdir /.local \
&& chmod 777 /.local \
&& mkdir /.jupyter \
&& chmod 777 /.jupyter \
&& mkdir /.config \
&& chmod 777 /.config \
&& mkdir /.cupy \
&& chmod 777 /.cupy
RUN source activate rapids \
&& pip install dask_labextension \
&& pip install sphinx sphinx_rtd_theme recommonmark numpydoc \
&& /conda/envs/rapids/bin/jupyter labextension install dask-labextension \
&& pip install cupy-cuda100
EXPOSE 8888
EXPOSE 8787
EXPOSE 8786
# the addon for vim editor
# RUN source activate rapids \
# && /conda/envs/rapids/bin/jupyter labextension install jupyterlab_vim
WORKDIR /
EOF

docker build -f $D_FILE -t $D_CONT .
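
For reference, a minimal sketch of how the image produced by build.sh might be started once the build finishes. The image tag (gquant/gquant:latest) and the exposed ports (8888 for JupyterLab, 8787/8786 for the Dask dashboard and scheduler) come from the script above; the GPU runtime flag and the host port mapping are assumptions that depend on the local NVIDIA Docker setup.

# Assumed invocation; on newer Docker installs, use --gpus all instead of --runtime=nvidia.
docker run --runtime=nvidia --rm -it \
    -p 8888:8888 -p 8787:8787 -p 8786:8786 \
    gquant/gquant:latest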

131 changes: 64 additions & 67 deletions notebook/01_tutorial.ipynb

Large diffs are not rendered by default.

18 changes: 11 additions & 7 deletions notebook/02_single_stock_trade.ipynb
@@ -8,6 +8,13 @@
"First import all the necessary modules."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": 1,
@@ -17,7 +24,6 @@
"import sys\n",
"sys.path.append('..')\n",
"\n",
"import os\n",
"import warnings\n",
"import ipywidgets as widgets\n",
"import nxpd\n",
@@ -192,7 +198,7 @@
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "c5f39b2436144ba594198a74d2743203",
"model_id": "69388a913ab54af9a4b2ea7bcce7e8ed",
"version_major": 2,
"version_minor": 0
},
@@ -205,13 +211,11 @@
}
],
"source": [
"action = \"load\" if os.path.isfile('./.cache/node_csvdata.hdf5') else \"save\"\n",
"\n",
"symbol = 'REXX'\n",
"o = dff.run(obj,\n",
" outputs=['node_sharpeRatio', 'node_cumlativeReturn',\n",
" 'node_barplot', 'node_lineplot', 'node_csvdata'],\n",
" replace={'node_csvdata': {action: True},\n",
" replace={'node_csvdata': {\"load\": True},\n",
" 'node_barplot': {'conf': {\"points\": 300}},\n",
" 'node_assetFilter':\n",
" {'conf': {'asset': list_stocks[symbol]}}})\n",
@@ -242,7 +246,7 @@
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "e0b9e4da549842e88078a6206ff8cc9e",
"model_id": "78618cf4870b49c2a29058e23cbe8fa6",
"version_major": 2,
"version_minor": 0
},
@@ -274,7 +278,7 @@
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "790208f8258c44b4a30b08552cd031bb",
"model_id": "d6e97f90036f4aacbb4e1998ae3e1515",
"version_major": 2,
"version_minor": 0
},
25 changes: 12 additions & 13 deletions notebook/03_simple_dask_example.ipynb
@@ -2,7 +2,7 @@
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
@@ -16,7 +16,7 @@
},
{
"cell_type": "code",
"execution_count": 2,
"execution_count": 3,
"metadata": {},
"outputs": [
{
@@ -27,7 +27,7 @@
"<td style=\"vertical-align: top; border: 0px solid white\">\n",
"<h3>Client</h3>\n",
"<ul>\n",
" <li><b>Scheduler: </b>tcp://127.0.0.1:37179\n",
" <li><b>Scheduler: </b>tcp://127.0.0.1:38970\n",
" <li><b>Dashboard: </b><a href='http://127.0.0.1:8787/status' target='_blank'>http://127.0.0.1:8787/status</a>\n",
"</ul>\n",
"</td>\n",
@@ -36,17 +36,17 @@
"<ul>\n",
" <li><b>Workers: </b>8</li>\n",
" <li><b>Cores: </b>8</li>\n",
" <li><b>Memory: </b>540.95 GB</li>\n",
" <li><b>Memory: </b>536.39 GB</li>\n",
"</ul>\n",
"</td>\n",
"</tr>\n",
"</table>"
],
"text/plain": [
"<Client: scheduler='tcp://127.0.0.1:37179' processes=8 cores=8>"
"<Client: scheduler='tcp://127.0.0.1:38970' processes=8 cores=8>"
]
},
"execution_count": 2,
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
@@ -68,7 +68,7 @@
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 4,
"metadata": {},
"outputs": [
{
@@ -89,7 +89,7 @@
"<IPython.core.display.HTML object>"
]
},
"execution_count": 3,
"execution_count": 4,
"metadata": {},
"output_type": "execute_result"
}
@@ -121,14 +121,14 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"/conda/envs/rapids/lib/python3.6/site-packages/cudf/io/hdf.py:13: UserWarning: Using CPU via Pandas to read HDF dataset, this may be GPU accelerated in the future\n",
"/conda/envs/rapids/lib/python3.6/site-packages/cudf-0.7.1-py3.6-linux-x86_64.egg/cudf/io/hdf.py:13: UserWarning: Using CPU via Pandas to read HDF dataset, this may be GPU accelerated in the future\n",
" warnings.warn(\"Using CPU via Pandas to read HDF dataset, this may \"\n"
]
},
@@ -156,18 +156,17 @@
"import os\n",
"node_csv = {\"id\": \"node_csvdata\",\n",
" \"type\": \"CsvStockLoader\",\n",
" \"conf\": {\"path\": \"./data/stocks/stock_price_hist.csv.gz\"},\n",
" \"conf\": {\"path\": \"/Project/data/stocks/stock_price_hist.csv.gz\"},\n",
" \"inputs\": []}\n",
"\n",
"node_sort = {\"id\": \"node_sort\",\n",
" \"type\": \"SortNode\",\n",
" \"conf\": {\"keys\": ['asset', 'datetime']},\n",
" \"inputs\": [\"node_csvdata\"]}\n",
"\n",
"action = \"load\" if os.path.isfile('./.cache/node_csvdata.hdf5') else \"save\"\n",
"df = run([node_csv, node_sort],\n",
" outputs=['node_sort'],\n",
" replace={'node_csvdata': {action: True}})[0]\n",
" replace={'node_csvdata': {\"load\": True}})[0]\n",
"os.makedirs('many-small', exist_ok=True)\n",
"print(df)\n",
"df = dd.from_pandas(df.to_pandas(), npartitions=8).to_csv('many-small/*.csv', index=False)"