Commit fe0997f7 authored by Federico Julian Camerota Verdu's avatar Federico Julian Camerota Verdu
Browse files

Added scripts for dask+cuml

parent 9e7fe362
import sys
import cudf
import cuml
from cuml.dask.cluster import KMeans as dask_kmeans
from cuml.dask.decomposition import PCA as dask_pca
import cupy as cu
import dask
import dask.array as da
from dask_cuda import LocalCUDACluster
import dask_cudf
from dask.dataframe import from_array
from dask.distributed import Client, LocalCluster, wait
from multiprocessing import Process, freeze_support
import mdtraj as md
import numpy as np
import os
import pandas as pd
from sklearn.decomposition import PCA
import time
if __name__ == "__main__":
    # Benchmark: load an MD trajectory with mdtraj, flatten its coordinates,
    # and run a distributed PCA (cuML over Dask) on the GPU cluster.
    # NOTE(review): indentation and two right-hand sides were lost in the diff
    # scrape; the reconstruction points are marked below — confirm against the
    # original script before running.
    dataDir = '/dgx/home/userinternal/fcamerot/ligatetrajectorybenchmark/data/'
    fileName = dataDir + 'nsp16_nsp10_6wkq'  # ~5GB trajectory
    # fileName = dataDir + 'NSP12-7-8_6M71'  # ~10GB trajectory
    topol = fileName + '.pdb'
    trajs = fileName + '.xtc'

    # cluster = LocalCUDACluster(n_workers=4, threads_per_worker=1)
    # cluster = LocalCluster()
    # client = Client(cluster, processes=False)
    client = Client()  # default local Dask cluster

    # tic = time.time()
    Ref = md.load(topol)        # reference structure (topology)
    top = Ref.topology
    trj = md.load(trajs, top=topol)  # full trajectory; dominates load time

    # kmeans_model = dask_kmeans(n_clusters=2)
    # Renamed from 'kmeans_model': the active model is a PCA, not k-means.
    pca_model = dask_pca(n_components=2)

    # Flatten (n_frames, n_atoms, 3) coordinates to (n_frames, n_atoms*3).
    # NOTE(review): original line read 'pca_trj =[:,:,:]' (truncated RHS);
    # trj.xyz is the only 3-D array in scope — confirm.
    pca_trj = trj.xyz[:, :, :]
    pca_trj = pca_trj.reshape(pca_trj.shape[0], -1)

    dask_df = from_array(pca_trj)
    dask_df = dask_df.map_partitions(cudf.from_pandas)  # move partitions to GPU

    # proj = kmeans_model.fit_predict(dask_df)
    # NOTE(review): original RHS was lost in the diff; fit_transform is the
    # standard projection call for cuML's dask PCA — confirm.
    proj = pca_model.fit_transform(dask_df)
    proj = proj.compute()
    # toc = time.time()
#!/bin/bash
# SLURM launcher: runs the dask+cuml benchmark inside a RAPIDS Singularity
# container on a DGX node.
# NOTE(review): this scrape appears to concatenate two files — the sbatch
# script (module load + singularity exec) and the inner script executed in
# the container (cd / conda activate / python). The script filenames after
# 'scripts/' were truncated in the diff; restore them before use.
#SBATCH --gres=gpu:4
#SBATCH --ntasks-per-node=32
#SBATCH -p dgx_usr_prod
#SBATCH -t 00:20:00

module load tools/anaconda
# TODO(review): append the truncated launcher script name after 'scripts/'
singularity exec --nv /dgx/home/userinternal/fcamerot/rapidsai-core_21.06-cuda11.0-runtime-ubuntu20.04-py3.7.sif /dgx/home/userinternal/fcamerot/ligatetrajectorybenchmark/scripts/
cd /
# Original line was truncated at 'profile.d/'; conda.sh is the standard
# activation hook shipped by conda — confirm.
. /opt/conda/etc/profile.d/conda.sh
conda env list
conda activate rapids
# TODO(review): append the truncated Python script name after 'scripts/'
python /dgx/home/userinternal/fcamerot/ligatetrajectorybenchmark/scripts/
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment