Commit f17cc6c
Parent(s): a45cd99

conda & miniconda added. Vars removed

Files changed:
- Dockerfile +75 -22
- docker/environment.yml +72 -0
- docker/postBuild +1 -0
- requirements.txt → docker/requirements.txt +0 -0
- r_shiny_app/execution_id +0 -0
- r_shiny_app/global.R +160 -0
- r_shiny_app/server-helper.R +60 -0
- r_shiny_app/server.R +1087 -0
- r_shiny_app/ui.R +179 -0
Dockerfile
CHANGED
@@ -15,25 +15,25 @@ FROM rocker/ml:4.2
 SHELL [ "/bin/bash", "--login", "-c" ]
 
 ###############
-# Ensure vars #
+# Ensure vars # (Only for local test - DON'T USE IT IN HUGGINGFACE. ENSURE .env IS NOT ACCESIBLE IN THE WEB, USE THEIR SECRET MANAGEMENT)
 ###############
-ARG USER_NAME
-ARG USER_ID
-ARG GROUP_ID
-ARG WANDB_API_KEY
-ARG WANDB_ENTITY
-ARG WANDB_PROJECT
-ARG GH_TOKEN
-
-
-ENV USER_NAME=$USER_NAME
-ENV USER_ID=$USER_ID
-ENV GROUP_ID=$GROUP_ID
-ENV WANDB_ENTITY=$WANDB_ENTITY
-ENV WANDB_API_KEY=$WANDB_API_KEY
+#ARG USER_NAME
+#ARG USER_ID
+#ARG GROUP_ID
+#ARG WANDB_API_KEY
+#ARG WANDB_ENTITY
+#ARG WANDB_PROJECT
+#ARG GH_TOKEN
+
+
+#ENV USER_NAME=$USER_NAME
+#ENV USER_ID=$USER_ID
+#ENV GROUP_ID=$GROUP_ID
+#ENV WANDB_ENTITY=$WANDB_ENTITY
+#ENV WANDB_API_KEY=$WANDB_API_KEY
 ENV RETICULATE_PYTHON_ENV=/home/${USER_NAME}/env
 ENV RETICULATE_MINICONDA_PATH=/home/${USER_NAME}/miniconda
-ENV WANDB_PROJECT=$WANDB_PROJECT
+#ENV WANDB_PROJECT=$WANDB_PROJECT
 ENV ROOT=TRUE
 ENV CUDA_VISIBLE_DEVICES=0,1,2
 ENV GH_TOKEN=$ghtoken
@@ -51,10 +51,6 @@ RUN echo ${USER_NAME}"|"${USER_ID}"|"${GROUP_ID}
 COPY --chown=${USER_ID}:${GROUP_ID} ../r_shiny_app /$HOME/app
 COPY --chown=${USER_ID}:${GROUP_ID} ${LOCAL_DATA_PATH} /$HOME/data/
 COPY --chown=${USER_ID}:${GROUP_ID} ../dvats ${HOME}/dvats
-COPY --chown=${USER_ID}:${GROUP_ID} ../lib /${HOME}/lib
-
-
-
 
 RUN apt-key adv --keyserver keyserver.ubuntu.com --recv-keys A4B469963BF863CC
 RUN apt-get update
@@ -101,7 +97,64 @@ ARG WANDB_API_KEY
 RUN echo "WANDB_API_KEY=${WANDB_API_KEY}" >> /${HOME}/.Renviron
 
 
-
+ENV MINICONDA_VERSION=4.10.3 \
+#ENV MINICONDA_VERSION=23.9.0 \
+    CONDA_DIR=$HOME/miniconda3 \
+    # Make non-activate conda commands available
+    PATH=$CONDA_DIR/bin:$PATH \
+    PROJECT_DIR=$HOME
+
+RUN echo "HOME: ${HOME} | CONDA_DIR = ${CONDA_DIR}" \
+    #-- Install MINICONDA
+    && wget --quiet https://repo.anaconda.com/miniconda/Miniconda3-py38_$MINICONDA_VERSION-Linux-x86_64.sh -O ~/miniconda.sh \
+    && chmod +x ~/miniconda.sh \
+    && ~/miniconda.sh -b -p $CONDA_DIR \
+    && rm ~/miniconda.sh
+
+# Make conda activate command available from /bin/bash --login shells
+RUN echo ". $CONDA_DIR/etc/profile.d/conda.sh" >> ~/.profile
+# make conda activate command available from /bin/bash --interative shells
+RUN conda init bash \
+    # create a project directory inside user home
+    && mkdir -p $PROJECT_DIR
+
+WORKDIR $PROJECT_DIR
+##########################
+# Install & update MAMBA #
+##########################
+ENV ENV_PREFIX $PROJECT_DIR/env
+RUN conda install --name base --channel conda-forge mamba \
+    && mamba update --name base --channel defaults conda
+#-- Build the mamba environment
+RUN mamba install conda-lock -c conda-forge
+COPY --chown=$UID:$GID docker/environment.yml docker/requirements.txt /tmp/
+#RUN mamba lock -f /tmp/environment.yml --lockfile /tmp/environment.lock
+#RUN mamba create --prefix ${ENV_PREFIX} --file /tmp/environment.lock
+RUN mamba env create --prefix ${ENV_PREFIX} --file /tmp/environment.yml
+RUN conda clean --all --yes
+
+# run the postBuild script to install the JupyterLab extensions
+COPY --chown=$UID:$GID docker/postBuild /usr/local/bin
+RUN chmod u+x /usr/local/bin/postBuild \
+    && conda activate $ENV_PREFIX \
+    && /usr/local/bin/postBuild \
+    && conda deactivate \
+    # Make bash automatically activate the conda environment
+    && echo "conda activate $ENV_PREFIX" >> ~/.bashrc
+RUN chmod u+x /usr/local/bin/postBuild \
+    && conda activate $ENV_PREFIX \
+    && /usr/local/bin/postBuild \
+    && conda deactivate \
+    # Make bash automatically activate the conda environment
+    && echo "conda activate $ENV_PREFIX" >> ~/.bashrc
+
+RUN conda list --prefix ${ENV_PREFIX}
+
+
+
+
+
+RUN echo "--> rewrrite miniconda path"
 # Rewrite the miniconda path environment in case it has been redefined in the compose file
 RUN echo "RETICULATE_MINICONDA_PATH=${RETICULATE_MINICONDA_PATH}" >> ${HOME}/.Renviron
 
@@ -112,7 +165,7 @@ RUN echo "--> Permissions"
 
 RUN echo "$(id )"
 
-RUN echo "--> Setup
+RUN echo "--> Setup miniconda & Mamba"
 
 RUN echo ". ${HOME}miniconda3/etc/profile.d/conda.sh" >> ${HOME}/rstudio/.bashrc
 
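Note: the hunks above bake RETICULATE_PYTHON_ENV and RETICULATE_MINICONDA_PATH into the image and rewrite the latter into ${HOME}/.Renviron so the R side can locate the mamba-built environment. Below is a minimal R sketch of how a session could bind reticulate to that prefix, assuming the app reads these variables explicitly; the variable names come from the Dockerfile above, use_condaenv() and py_config() are standard reticulate calls, and everything else is illustrative rather than the app's actual startup code.

    # Sketch (not part of the commit): bind reticulate to the conda env the image builds.
    library(reticulate)

    # Values written by the Dockerfile into ${HOME}/.Renviron
    env_prefix    <- Sys.getenv("RETICULATE_PYTHON_ENV",     unset = "~/env")
    miniconda_dir <- Sys.getenv("RETICULATE_MINICONDA_PATH", unset = "~/miniconda3")
    cat("conda env prefix:      ", env_prefix, "\n")
    cat("miniconda path (hint): ", miniconda_dir, "\n")

    # Point reticulate at the prefix created by `mamba env create --prefix ${ENV_PREFIX}`
    use_condaenv(condaenv = env_prefix, required = TRUE)

    # Confirm which Python/conda environment was actually bound
    print(py_config())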
docker/environment.yml
ADDED
@@ -0,0 +1,72 @@
name: dvats_env
channels:
  #--- Torch & Cuda
  - pytorch
  #- nvidia/label/cuda-12.2.0
  - nvidia
  #--- Generic
  - conda-forge
  - defaults
  #--- AI
  - fastai
  - rapidsai
  - defaults
  - conda-forge

  - timeseriesai
dependencies:
  ##########################
  # Generic configurations #
  ##########################
  - nano=7.2=h8228510_0
  - git=2.42.0=pl5321h86e50cf_0
  #--- github
  - gh=2.37.0=ha8f183a_0
  #--- Python
  - python=3.10.13=hd12c33a_0_cpython
  #WANDB <= 0.15.12 needs this version. See https://github.com/wandb/wandb/issues/6546
  - ipython=8.16.1=pyh0d859eb_0
  - pip=23.3.1=pyhd8ed1ab_0
  - conda-forge::twine=4.0.2=pyhd8ed1ab_0
  - conda-forge::xeus-python=0.15.10=py310hd41b1e2_1
  #--- NodeJS
  - conda-forge::nodejs=20.8.1=h1990674_0
  #---Jupyter
  - conda-forge::ipywidgets=8.1.1=pyhd8ed1ab_0
  - conda-forge::jupyterlab=4.0.7=pyhd8ed1ab_0
  - conda-forge::jupyterlab-git=0.41.0=pyhd8ed1ab_1
  - conda-forge::nbclassic=1.0.0=pyhb4ecaf3_1
  - conda-forge::nbdev=2.2.10=pyhd8ed1ab_0
  - nbformat>=5.1.3
  #--- passwords & files
  - keyrings.alt
  - platformdirs=2.5.2
  - conda-forge::keyrings.alt=4.2.0=pyhd8ed1ab_0
  - psutil>=5.4.8
  ##############
  # CUDA & GPU #
  ##############
  - nvidia::cuda-toolkit=11.6=0
  - rapidsai::cudf=23.02=cuda_11_py310_g5ad4a85b9d_0
  - rapidsai::cuml=23.02=cuda11_py310_g3356f05bd_0
  - conda-forge::genv=1.2.0=pyhd8ed1ab_0
  ############
  # Specific #
  ############
  #--- Torch
  - pytorch=1.13=py3.10_cuda11.7_cudnn8.5.0_0
  #---Graphs & imgs
  - conda-forge::plotly=5.18.0=pyhd8ed1ab_0
  - conda-forge::jpeg=9e=h166bdaf_2
  - conda-forge::seaborn=0.13.0=hd8ed1ab_0
  - conda-forge::matplotlib=3.8.1=py310hff52083_0
  #--- IA
  - conda-forge::hdbscan=0.8.33=py310h1f7b6fc_4
  - conda-forge::imbalanced-learn>= 0.8.0
  - conda-forge::pyts >= 0.12.0
  - conda-forge::umap-learn=0.5.4=py310hff52083_0
  - conda-forge::scikit-learn=1.2.2=py310hf7d194e_2
  - conda-forge::wandb=0.14.2=pyhd8ed1ab_0
  - fastai::fastai>=2.7.9
  - timeseriesai::tsai
  - conda-forge::feather-format
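Because the Shiny app consumes this Python stack through reticulate (see r_shiny_app/global.R below), a quick sanity check is to import each pinned module from R and read its __version__. This is an illustrative sketch, not part of the commit; import() and py_get_attr() are standard reticulate functions, and the module list simply mirrors the pins above.

    # Sketch: verify from R that the pinned Python packages resolve inside the env.
    library(reticulate)

    modules <- c("torch", "wandb", "hdbscan", "umap", "sklearn", "tsai")
    for (m in modules) {
      mod <- tryCatch(import(m), error = function(e) NULL)
      ver <- if (is.null(mod)) {
        "not importable"
      } else {
        tryCatch(as.character(py_get_attr(mod, "__version__")),
                 error = function(e) "unknown")
      }
      cat(sprintf("%-8s %s\n", m, ver))
    }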
docker/postBuild
ADDED
@@ -0,0 +1 @@
#jupyter lab build --dev-build=False --minimize=False
requirements.txt → docker/requirements.txt
RENAMED
File without changes
r_shiny_app/execution_id
ADDED
Binary file (328 Bytes)
r_shiny_app/global.R
ADDED
@@ -0,0 +1,160 @@
# R dependencies
library(shiny)
library(shinyjs)
library(reticulate)
library(purrr)
library(jsonlite)
library(tibble)
library(ggplot2)
library(glue)
library(shinycssloaders)
library(tidyr)
library(data.table)
library(dplyr)
library(dygraphs)
library(shinyWidgets)
library(RColorBrewer)
library(pals)
library(stringr)
################## REMOVE ONCE THIS WORKS
library(reactlog)
library(feather)
library(arrow)
library(fasttime)
library(parallel)
#library(shinythemes)
library(xts)

reactlog::reactlog_enable()
#options(shiny.trace = TRUE, shiny.loglevel = "DEBUG", shiny.app_log_path = "app/shiny_logs_internal")

torch <- reticulate::import("torch")
#options(shiny.trace = TRUE)
if(torch$cuda$is_available()){
  print(paste0("CUDA AVAILABLE. Num devices: ", torch$cuda$device_count()))
  torch$cuda$set_device(as.integer(1))
  #torch$cuda$set_device(as.integer(1))
  #torch$cuda$set_device(as.integer(2))
  #print(torch$cuda$memory_summary())
  print(Sys.getenv("PYTORCH_CUDA_ALLOC_CONF"))
} else {
  print("CUDA NOT AVAILABLE")
}
################# REMOVE ONCE THIS WORKS

# Python dependencies
#tsai_data = import("tsai.data.all")
#wandb = import("wandb")
#pd = import("pandas")
#hdbscan = import("hdbscan")
#dvats = import_from_path("dvats.all", path=paste0(Sys.getenv("HOME")))
############ Just in case. Trying to get why get_enc_embs gets freezed
# Python dependencies
tsai_data = reticulate::import("tsai.data.all")
wandb = reticulate::import("wandb")
pd = reticulate::import("pandas")
hdbscan = reticulate::import("hdbscan")
dvats = reticulate::import_from_path("dvats.all", path=paste0(Sys.getenv("HOME")))


print("--> py_config ")
print(reticulate::py_config())
print("py_config -->")

#############
# CONFIG #
#############

QUERY_RUNS_LIMIT = 1
DEFAULT_PATH_WANDB_ARTIFACTS = paste0(Sys.getenv("HOME"), "/data/wandb_artifacts")
hdbscan_metrics <- hdbscan$dist_metrics$METRIC_MAPPING
#hdbscan_metrics <- c('euclidean', 'l2', 'l1', 'manhattan', 'cityblock', 'braycurtis', 'canberra', 'chebyshev', 'correlation', 'cosine', 'dice', 'hamming', 'jaccard', 'kulsinski', 'mahalanobis', 'matching', 'minkowski', 'rogerstanimoto', 'russellrao', 'seuclidean', 'sokalmichener', 'sokalsneath', 'sqeuclidean', 'yule', 'wminkowski', 'nan_euclidean', 'haversine')
Sys.setenv("TZ"="UTC")
DEFAULT_VALUES = list(metric_hdbscan = "euclidean",
                      min_cluster_size_hdbscan = 100,
                      min_samples_hdbscan = 15,
                      cluster_selection_epsilon_hdbscan = 0.08,
                      path_line_size = 0.08,
                      path_alpha = 5/10,
                      point_alpha = 1/10,
                      point_size = 1)
WANDB_ENTITY = Sys.getenv("WANDB_ENTITY")
WANDB_PROJECT = Sys.getenv("WANDB_PROJECT")


####################
# HELPER FUNCTIONS #
####################

get_window_indices = function(idxs, w, s) {
  idxs %>% map(function (i) {
    start_index = ((i-1)*s + 1)
    return(start_index:(start_index+w-1))
  })
}

dyUnzoom <-function(dygraph) {
  dyPlugin(
    dygraph = dygraph,
    name = "Unzoom",
    path = system.file("plugins/unzoom.js", package = "dygraphs")
  )
}

vec_dyShading <- function(dyg, from, to, color, data_rownames) {

  # assuming that from, to, and color have all same length
  n <- length(from)
  if (n == 0) return(dyg)

  new_shades <- vector(mode = "list", length = n)
  for (i in 1:n) {
    new_shades[[i]] <- list(from = data_rownames[from[[i]]],
                            to = data_rownames[to[[i]]],
                            color = color,
                            axis = "x")
  }
  dyg$x$shadings <- c(dyg$x$shadings, new_shades)
  dyg
}

# Not used yet (it is likely to be used in the future)
make_individual_dygraph <- function(i){
  plt <- dygraph(tsdf()[i],height= "170",group = "timeseries", ylab = names(tsdf())[i],width="100%") %>%
    dySeries(color=color_scale_dygraph[i]) %>%
    dyHighlight(hideOnMouseOut = TRUE) %>%
    dyOptions(labelsUTC = TRUE) %>%
    dyLegend(show = "follow", hideOnMouseOut = TRUE) %>%
    dyUnzoom() %>%
    dyHighlight(highlightSeriesOpts = list(strokeWidth = 3)) %>%
    dyCSS(
      textConnection(
        "
        .dygraph-ylabel {font-size: 9px; width: 80%;text-align: center;float: right}
        .dygraph-legend > span { display: none; }
        .dygraph-legend > span.highlight { display: inline; }"
      )
    )
  if(i==1){
    plt <-plt %>%
      dyRangeSelector(height = 20, strokeColor = "")
  }
  plt
}


##############################################
# RETRIEVE WANDB RUNS & ARTIFACTS #
##############################################

api <- wandb$Api()

print("Querying encoders")
encs_l <- dvats$get_wandb_artifacts(project_path = glue(WANDB_ENTITY, "/", WANDB_PROJECT),
                                    type = "learner",
                                    last_version=F) %>%
  discard(~ is_empty(.$aliases) | is_empty(.$metadata$train_artifact))
encs_l <- encs_l %>% set_names(encs_l %>% map(~ glue(WANDB_ENTITY, "/", WANDB_PROJECT, "/", .$name)))
#discard(~ str_detect(.$name, "dcae"))

print("Done!")
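get_window_indices() above maps embedding row i to the block of time-series rows covered by the i-th sliding window of length w and stride s (start index (i-1)*s + 1). A small self-contained check of that mapping, restating the committed function so the snippet runs on its own:

    library(purrr)

    get_window_indices = function(idxs, w, s) {
      idxs %>% map(function(i) {
        start_index = ((i - 1) * s + 1)
        return(start_index:(start_index + w - 1))
      })
    }

    # Windows of length 4 with stride 2: embedding rows 1..3 cover
    # time-series rows 1:4, 3:6 and 5:8 respectively.
    str(get_window_indices(1:3, w = 4, s = 2))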
r_shiny_app/server-helper.R
ADDED
@@ -0,0 +1,60 @@
# Function for parallel timeindex conversion
parallel_posfix <- function(df) {
  chunk_size = 100000
  num_chunks = ceiling(nrow(df)/chunk_size)
  chunks=split(df$timeindex, ceiling(seq_along(df$timeindex)/chunk_size))

  print(paste0("Parallel posfix | Chunks: ", num_chunks))
  cl = parallel::makeCluster(4)
  parallel::clusterEvalQ(cl, library(fasttime))

  print(paste0("Parallel posfix | Cluster ", cl, " of ", detectCores()))
  flush.console()

  result <- parallel::clusterApply(cl, chunks, function(chunk) {
    cat("Processing chunk\n")
    flush.console()
    #fasttime::fastPOSIXct(chunk, format = "%Y-%m-%d %H:%M:%S")
    as.POSIXct(chunk)
  })
  stopCluster(cl)
  print(" Reactive tsdf | Make conversion -->")
  print(" Reactive tsdf | Make conversion ")
  flush.console()
  return(unlist(result))
}

# Get next index for the projection plot
set_plot_id <- function(prj_plot_id)({
  prj_plot_id(prj_plot_id()+1)
})

# Get projection plot name
get_prjs_plot_name <- function(dataset_name, encoder_name, selected, cluster, prj_plot_id, input){
  set_plot_id()
  plt_name <- paste0(
    execution_id, "_",
    prj_plot_id(), "_",
    dataset_name, "_",
    encoder_name, "_",
    input$cpu_flag, "_",
    input$dr_method, "_",
    input$clustering_options, "_",
    "zoom", "_",
    input$zoom_btn, "_",
    "point_alpha_",
    input$point_alpha, "_",
    "show_lines_",
    input$show_lines, "_",
    "prjs.png"
  )
  print(paste0("embeddings plot name", plt_name))
  plt_name
}

get_ts_plot_name <- function(dataset_name, encoder_name, prj_plot_id, input){
  print("Getting timeserie plot name")
  plt_name <- paste0(dataset_name, "_", encoder_name, input$dr_method, "_ts.html")
  print(paste0("ts plot name: ", plt_name))
  plt_name
}
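parallel_posfix() above splits the timeindex column into fixed-size chunks and converts each chunk to POSIXct on a small PSOCK cluster. A toy, self-contained run of the same idea on hypothetical data (smaller chunks and fewer workers than the 100000/4 used in the file; do.call(c, ...) is used here instead of unlist() so the POSIXct class survives the recombination):

    library(parallel)

    # Hypothetical frame standing in for the feather-backed time series
    df <- data.frame(timeindex = format(Sys.time() + 0:9999, "%Y-%m-%d %H:%M:%S"))

    chunk_size <- 2500   # the app uses 100000
    chunks <- split(df$timeindex, ceiling(seq_along(df$timeindex) / chunk_size))

    cl <- makeCluster(2) # the app uses 4 workers
    converted <- clusterApply(cl, chunks, function(chunk) as.POSIXct(chunk, tz = "UTC"))
    stopCluster(cl)

    timeindex_posix <- do.call(c, converted)
    stopifnot(length(timeindex_posix) == nrow(df))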
r_shiny_app/server.R
ADDED
@@ -0,0 +1,1087 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#
|
2 |
+
# This is the server logic of a Shiny web application. You can run the
|
3 |
+
# application by clicking 'Run App' above.
|
4 |
+
#
|
5 |
+
# Find out more about building applications with Shiny here:
|
6 |
+
#
|
7 |
+
# http://shiny.rstudio.com/
|
8 |
+
#
|
9 |
+
###########3 devtools::install_github("apache/arrow/r", ref = "tags/apache-arrow-14.0.0", subdir = "arrow/r")
|
10 |
+
|
11 |
+
|
12 |
+
source("./server-helper.R")
|
13 |
+
|
14 |
+
shinyServer(function(input, output, session) {
|
15 |
+
options(shiny.verbose = TRUE)
|
16 |
+
#options(shiny.error = function() {
|
17 |
+
# traceback()
|
18 |
+
# stopApp()
|
19 |
+
#})
|
20 |
+
|
21 |
+
######################
|
22 |
+
# REACTIVES VALUES #
|
23 |
+
######################
|
24 |
+
|
25 |
+
# Reactive values created to update the current range of the main slider input
|
26 |
+
#slider_range <- reactiveValues(min_value = 1, max_value = 2)
|
27 |
+
|
28 |
+
# Reactive value created to keep updated the selected precomputed clusters_labels artifact
|
29 |
+
precomputed_clusters <- reactiveValues(selected = NULL)
|
30 |
+
|
31 |
+
|
32 |
+
# Reactive value created to keep updated the selected clustering option
|
33 |
+
clustering_options <- reactiveValues(selected = "no_clusters")
|
34 |
+
|
35 |
+
|
36 |
+
# Reactive value created to configure the graph brush
|
37 |
+
ranges <- reactiveValues(x = NULL, y = NULL)
|
38 |
+
|
39 |
+
|
40 |
+
# Reactive value created to configure clusters options
|
41 |
+
clusters_config <- reactiveValues(
|
42 |
+
metric_hdbscan = DEFAULT_VALUES$metric_hdbscan,
|
43 |
+
min_cluster_size_hdbscan = DEFAULT_VALUES$min_cluster_size_hdbscan,
|
44 |
+
min_samples_hdbscan = DEFAULT_VALUES$min_samples_hdbscan,
|
45 |
+
cluster_selection_epsilon_hdbscan = DEFAULT_VALUES$cluster_selection_epsilon_hdbscan
|
46 |
+
)
|
47 |
+
|
48 |
+
# Reactive values created to configure the appearance of the projections graph.
|
49 |
+
config_style <- reactiveValues(
|
50 |
+
path_line_size = DEFAULT_VALUES$path_line_size,
|
51 |
+
path_alpha = DEFAULT_VALUES$path_alpha,
|
52 |
+
point_alpha = DEFAULT_VALUES$point_alpha,
|
53 |
+
point_size = DEFAULT_VALUES$point_size
|
54 |
+
)
|
55 |
+
|
56 |
+
# Reactive value created to store time series selected variables
|
57 |
+
ts_variables <- reactiveValues(selected = NULL)
|
58 |
+
|
59 |
+
# Reactive value created to store the encoder_input
|
60 |
+
X <- reactiveVal()
|
61 |
+
|
62 |
+
# Reactive value created to store encoder artifact stride
|
63 |
+
enc_ar_stride <- eventReactive(enc_ar(), {
|
64 |
+
stride = ceiling(enc_ar()$metadata$stride/2)
|
65 |
+
})
|
66 |
+
|
67 |
+
# Time series artifact
|
68 |
+
ts_ar <- eventReactive(
|
69 |
+
input$dataset,
|
70 |
+
{
|
71 |
+
req(input$dataset)
|
72 |
+
ar <- api$artifact(input$dataset, type='dataset')
|
73 |
+
on.exit({print("eventReactive ts_ar -->"); flush.console()})
|
74 |
+
ar
|
75 |
+
}, label = "ts_ar")
|
76 |
+
|
77 |
+
|
78 |
+
# Reactive value for indexing saved projections plot
|
79 |
+
prj_plot_id <- reactiveVal(0)
|
80 |
+
|
81 |
+
#################################
|
82 |
+
# OBSERVERS & OBSERVERS EVENTS #
|
83 |
+
#################################
|
84 |
+
observeEvent(
|
85 |
+
req(exists("encs_l")),
|
86 |
+
{
|
87 |
+
freezeReactiveValue(input, "dataset")
|
88 |
+
print("observeEvent encoders list enc_l | update dataset list | after freeze")
|
89 |
+
updateSelectizeInput(
|
90 |
+
session = session,
|
91 |
+
inputId = "dataset",
|
92 |
+
choices = encs_l %>%
|
93 |
+
map(~.$metadata$train_artifact) %>%
|
94 |
+
set_names()
|
95 |
+
)
|
96 |
+
on.exit({print("observeEvent encoders list encs_l | update dataset list -->"); flush.console()})
|
97 |
+
},
|
98 |
+
label = "input_dataset"
|
99 |
+
)
|
100 |
+
|
101 |
+
observeEvent(input$dataset, {
|
102 |
+
#req(encs_l)
|
103 |
+
print("--> observeEvent input_dataset | update encoder list")
|
104 |
+
print(input$dataset)
|
105 |
+
freezeReactiveValue(input, "encoder")
|
106 |
+
print(paste0("observeEvent input_dataset | update encoders for dataset ", input$dataset))
|
107 |
+
updateSelectizeInput(
|
108 |
+
session = session,
|
109 |
+
inputId = "encoder",
|
110 |
+
choices = encs_l %>%
|
111 |
+
keep(~ .$metadata$train_artifact == input$dataset) %>%
|
112 |
+
#map(~ .$metadata$enc_artifact) %>%
|
113 |
+
names
|
114 |
+
)
|
115 |
+
### TODO: Ver cómo poner bien esta ñapa para que no se actualizen los gráficos antes que el stride
|
116 |
+
updateSliderInput(session, "stride", value = 0)
|
117 |
+
################
|
118 |
+
on.exit(
|
119 |
+
{print("observeEvent input_dataset | update encoder list -->"); flush.console()}
|
120 |
+
)
|
121 |
+
}, label = "input_encoder")
|
122 |
+
|
123 |
+
observeEvent(
|
124 |
+
input$encoder,
|
125 |
+
{
|
126 |
+
#req(input$dataset, encs_l)
|
127 |
+
#enc_ar = req(enc_ar())
|
128 |
+
print("--> observeEvent input_encoder | update wlen")
|
129 |
+
freezeReactiveValue(input, "wlen")
|
130 |
+
print("observeEvent input_encoder | update wlen | Before enc_ar")
|
131 |
+
enc_ar = enc_ar()
|
132 |
+
print(paste0("observeEvent input_encoder | update wlen | enc_ar: ", enc_ar))
|
133 |
+
print("observeEvent input_encoder | update wlen | Set wlen slider values")
|
134 |
+
if (is.null(enc_ar$metadata$mvp_ws)) {
|
135 |
+
print("observeEvent input_encoder | update wlen | Set wlen slider values from w | ")
|
136 |
+
enc_ar$metadata$mvp_ws = c(enc_ar$metadata$w, enc_ar$metadata$w)
|
137 |
+
}
|
138 |
+
print(paste0("observeEvent input_encoder | update wlen | enc_ar$metadata$mvp_ws ", enc_ar$metadata$mvp_ws ))
|
139 |
+
wmin <- enc_ar$metadata$mvp_ws[1]
|
140 |
+
wmax <- enc_ar$metadata$mvp_ws[2]
|
141 |
+
wlen <- enc_ar$metadata$w
|
142 |
+
print(paste0("observeEvent input_encoder | update wlen | Update slider input (", wmin, ", ", wmax, " ) -> ", wlen ))
|
143 |
+
updateSliderInput(session = session, inputId = "wlen",
|
144 |
+
min = wmin,
|
145 |
+
max = wmax,
|
146 |
+
value = wlen
|
147 |
+
)
|
148 |
+
updateSliderInput(
|
149 |
+
session = session, inputId = "stride",
|
150 |
+
min = 1, max = input$wlen,
|
151 |
+
value = enc_ar_stride()
|
152 |
+
)
|
153 |
+
on.exit({print("observeEvent input_encoder | update wlen -->"); flush.console()})
|
154 |
+
}
|
155 |
+
)
|
156 |
+
|
157 |
+
# Obtener el valor de stride
|
158 |
+
enc_ar_stride = reactive({
|
159 |
+
print("--> reactive enc_ar_stride")
|
160 |
+
stride = ceiling(enc_ar()$metadata$mvp_ws[2]/2) #<- enc_ar()$metadata$stride
|
161 |
+
on.exit({print(paste0("reactive_enc_ar_stride | --> ", stride)); flush.console()})
|
162 |
+
stride
|
163 |
+
})
|
164 |
+
|
165 |
+
observeEvent(input$wlen, {
|
166 |
+
req(input$wlen)
|
167 |
+
print(paste0("--> observeEvent input_wlen | update slide stride value | wlen ", input$wlen))
|
168 |
+
tryCatch({
|
169 |
+
old_value = input$stride
|
170 |
+
if (input$stride == 0 | input$stride == 1){
|
171 |
+
old_value = enc_ar_stride()
|
172 |
+
print(paste0("enc_ar_stride: ", old_value))
|
173 |
+
}
|
174 |
+
freezeReactiveValue(input, "stride")
|
175 |
+
print(paste0("oserveEvent input_wlen | update slide stride value | Update stride to ", old_value))
|
176 |
+
updateSliderInput(
|
177 |
+
session = session, inputId = "stride",
|
178 |
+
min = 1, max = input$wlen,
|
179 |
+
value = ifelse(old_value <= input$wlen, old_value, 1)
|
180 |
+
)
|
181 |
+
},
|
182 |
+
error = function(e){
|
183 |
+
print(paste0("observeEvent input_wlen | update slide stride value | Error | ", e$message))
|
184 |
+
},
|
185 |
+
warning = function(w) {
|
186 |
+
message(paste0("observeEvent input_wlen | update slide stride value | Warning | ", w$message))
|
187 |
+
}
|
188 |
+
)
|
189 |
+
on.exit({print(paste0(
|
190 |
+
"observeEvent input_wlen | update slide stride value | Finally | wlen min ",
|
191 |
+
1, " max ", input$wlen, " current value ", input$stride, " -->")); flush.console()})
|
192 |
+
})
|
193 |
+
|
194 |
+
# Update "metric_hdbscan" selectInput when the app is loaded
|
195 |
+
observe({
|
196 |
+
updateSelectInput(
|
197 |
+
session = session,
|
198 |
+
inputId = "metric_hdbscan",
|
199 |
+
choices = names(req(hdbscan_metrics))
|
200 |
+
)
|
201 |
+
})
|
202 |
+
# Update the range of point selection when there is new data
|
203 |
+
# observeEvent(X(), {
|
204 |
+
# #max_ = ts_ar()$metadata$TS$n_samples
|
205 |
+
# max_ = dim(X())[[1]]
|
206 |
+
# freezeReactiveValue(input, "points_emb")
|
207 |
+
# updateSliderInput(session = session, inputId = "points_emb",
|
208 |
+
# min = 1, max = max_, value = c(1, max_))
|
209 |
+
# })
|
210 |
+
|
211 |
+
# Update selected time series variables and update interface config
|
212 |
+
observeEvent(tsdf(), {
|
213 |
+
print("--> observeEvent tsdf | update select variables")
|
214 |
+
on.exit({print("--> observeEvent tsdf | update select variables -->"); flush.console()})
|
215 |
+
freezeReactiveValue(input, "select_variables")
|
216 |
+
#ts_variables$selected = names(tsdf())[names(tsdf()) != "timeindex"]
|
217 |
+
ts_variables$selected = names(isolate(tsdf()))
|
218 |
+
print(paste0("observeEvent tsdf | select variables ", ts_variables$selected))
|
219 |
+
updateCheckboxGroupInput(
|
220 |
+
session = session,
|
221 |
+
inputId = "select_variables",
|
222 |
+
choices = ts_variables$selected,
|
223 |
+
selected = ts_variables$selected
|
224 |
+
)
|
225 |
+
}, label = "select_variables")
|
226 |
+
|
227 |
+
# Update precomputed_clusters reactive value when the input changes
|
228 |
+
observeEvent(input$clusters_labels_name, {
|
229 |
+
print("--> observe | precomputed_cluster selected ")
|
230 |
+
precomputed_clusters$selected <- req(input$clusters_labels_name)
|
231 |
+
print(paste0("observe | precomputed_cluster selected --> | ", precomputed_cluster$selected))
|
232 |
+
})
|
233 |
+
|
234 |
+
|
235 |
+
# Update clustering_options reactive value when the input changes
|
236 |
+
observe({
|
237 |
+
print("--> Observe clustering options")
|
238 |
+
clustering_options$selected <- req(input$clustering_options)
|
239 |
+
print("Observe clustering options -->")
|
240 |
+
})
|
241 |
+
|
242 |
+
# Update clusters_config reactive values when user clicks on "calculate_clusters" button
|
243 |
+
observeEvent(input$calculate_clusters, {
|
244 |
+
print("--> observe event calculate_clusters | update clusters_config")
|
245 |
+
clusters_config$metric_hdbscan <- req(input$metric_hdbscan)
|
246 |
+
clusters_config$min_cluster_size_hdbscan <- req(input$min_cluster_size_hdbscan)
|
247 |
+
clusters_config$min_samples_hdbscan <- req(input$min_samples_hdbscan)
|
248 |
+
clusters_config$cluster_selection_epsilon_hdbscan <- req(input$cluster_selection_epsilon_hdbscan)
|
249 |
+
#on.exit({print("observe event calculate_clusters | update clusters_config -->"))
|
250 |
+
})
|
251 |
+
|
252 |
+
|
253 |
+
# Observe the events related to zoom the projections graph
|
254 |
+
observeEvent(input$zoom_btn, {
|
255 |
+
|
256 |
+
print("--> observeEvent zoom_btn")
|
257 |
+
brush <- input$projections_brush
|
258 |
+
if (!is.null(brush)) {
|
259 |
+
if(isTRUE(input$zoom_btn)){
|
260 |
+
ranges$x <- c(brush$xmin, brush$xmax)
|
261 |
+
ranges$y <- c(brush$ymin, brush$ymax)
|
262 |
+
}else {
|
263 |
+
ranges$x <- NULL
|
264 |
+
ranges$y <- NULL
|
265 |
+
}
|
266 |
+
|
267 |
+
} else {
|
268 |
+
ranges$x <- NULL
|
269 |
+
ranges$y <- NULL
|
270 |
+
}
|
271 |
+
})
|
272 |
+
|
273 |
+
|
274 |
+
# Observe the events related to change the appearance of the projections graph
|
275 |
+
observeEvent(input$update_prj_graph,{
|
276 |
+
style_values <- list(path_line_size = input$path_line_size ,
|
277 |
+
path_alpha = input$path_alpha,
|
278 |
+
point_alpha = input$point_alpha,
|
279 |
+
point_size = input$point_size)
|
280 |
+
|
281 |
+
if (!is.null(style_values)) {
|
282 |
+
config_style$path_line_size <- style_values$path_line_size
|
283 |
+
config_style$path_alpha <- style_values$path_alpha
|
284 |
+
config_style$point_alpha <- style_values$point_alpha
|
285 |
+
config_style$point_size <- style_values$point_size
|
286 |
+
} else {
|
287 |
+
config_style$path_line_size <- NULL
|
288 |
+
config_style$path_alpha <- NULL
|
289 |
+
config_style$point_alpha <- NULL
|
290 |
+
config_style$point_size <- NULL
|
291 |
+
}
|
292 |
+
})
|
293 |
+
|
294 |
+
|
295 |
+
# Update ts_variables reactive value when time series variable selection changes
|
296 |
+
observeEvent(input$select_variables, {
|
297 |
+
ts_variables$selected <- input$select_variables
|
298 |
+
})
|
299 |
+
|
300 |
+
|
301 |
+
# Observe to check/uncheck all variables
|
302 |
+
observeEvent(input$selectall,{
|
303 |
+
req(tsdf)
|
304 |
+
ts_variables$selected <- names(isolate(tsdf()))
|
305 |
+
if(input$selectall %%2 == 0){
|
306 |
+
updateCheckboxGroupInput(session = session,
|
307 |
+
inputId = "select_variables",
|
308 |
+
choices = ts_variables$selected,
|
309 |
+
selected = ts_variables$selected)
|
310 |
+
} else {
|
311 |
+
updateCheckboxGroupInput(session = session,
|
312 |
+
inputId = "select_variables",
|
313 |
+
choices = ts_variables$selected,
|
314 |
+
selected = NULL)
|
315 |
+
}
|
316 |
+
})
|
317 |
+
# Observe to update encoder input (enc_input = X())
|
318 |
+
observe({ #Event(input$dataset, input$encoder, input$wlen, input$stride, {
|
319 |
+
req(input$wlen != 0, input$stride != 0, input$stride != 1)
|
320 |
+
print(paste0("Check reactiveness | X | wlen, stride |"))
|
321 |
+
if (
|
322 |
+
is.null(X()) ||
|
323 |
+
!identical(
|
324 |
+
input$dataset, isolate(input$dataset)) ||
|
325 |
+
!identical(input$encoder, isolate(input$encoder)) ||
|
326 |
+
input$wlen != isolate(input$wlen) ||
|
327 |
+
input$stride != isolate(input$stride)
|
328 |
+
) {
|
329 |
+
print("--> ReactiveVal X | Update Sliding Window")
|
330 |
+
print(paste0("reactive X | wlen ", input$wlen, " | stride ", input$stride, " | Let's prepare data"))
|
331 |
+
print("reactive X | SWV")
|
332 |
+
|
333 |
+
t_x_0 <- Sys.time()
|
334 |
+
|
335 |
+
enc_input = dvats$exec_with_feather_k_output(
|
336 |
+
function_name = "prepare_forecasting_data",
|
337 |
+
module_name = "tsai.data.preparation",
|
338 |
+
path = file.path(DEFAULT_PATH_WANDB_ARTIFACTS, ts_ar()$metadata$TS$hash),
|
339 |
+
k_output = as.integer(0),
|
340 |
+
print_flag = TRUE,
|
341 |
+
time_flag = TRUE,
|
342 |
+
fcst_history = input$wlen
|
343 |
+
)
|
344 |
+
|
345 |
+
t_x_1 <- Sys.time()
|
346 |
+
t_sliding_window_view = t_x_1 - t_x_0
|
347 |
+
print(paste0("reactive X | SWV: ", t_sliding_window_view, " secs "))
|
348 |
+
|
349 |
+
print(paste0("reactive X | Update sliding window | Apply stride ", input$stride," | enc_input ~ ", dim(enc_input), "-->"))
|
350 |
+
print("| Update | X" )
|
351 |
+
on.exit({print("| Outside| X"); flush.console()})
|
352 |
+
X(enc_input)
|
353 |
+
}
|
354 |
+
X()
|
355 |
+
})
|
356 |
+
|
357 |
+
###############
|
358 |
+
# REACTIVES #
|
359 |
+
###############
|
360 |
+
|
361 |
+
# Get timeseries artifact metadata
|
362 |
+
ts_ar_config = reactive({
|
363 |
+
print("--> reactive ts_ar_config | List used artifacts")
|
364 |
+
ts_ar = req(ts_ar())
|
365 |
+
print(paste0("reactive ts_ar_config | List used artifacts | hash", ts_ar$hash))
|
366 |
+
list_used_arts = ts_ar$metadata$TS
|
367 |
+
list_used_arts$vars = ts_ar$metadata$TS$vars %>% stringr::str_c(collapse = "; ")
|
368 |
+
list_used_arts$name = ts_ar$name
|
369 |
+
list_used_arts$aliases = ts_ar$aliases
|
370 |
+
list_used_arts$artifact_name = ts_ar$name
|
371 |
+
list_used_arts$id = ts_ar$id
|
372 |
+
list_used_arts$created_at = ts_ar$created_at
|
373 |
+
list_used_arts
|
374 |
+
on.exit({print("reactive ts_ar_config -->"); flush.console()})
|
375 |
+
})
|
376 |
+
|
377 |
+
# Get encoder artifact
|
378 |
+
enc_ar <- eventReactive (
|
379 |
+
input$encoder,
|
380 |
+
{
|
381 |
+
print(paste0("eventReactive enc_ar | Enc. Artifact: ", input$encoder))
|
382 |
+
result <- tryCatch({
|
383 |
+
api$artifact(input$encoder, type = 'learner')
|
384 |
+
}, error = function(e){
|
385 |
+
print(paste0("eventReactive enc_ar | Error: ", e$message))
|
386 |
+
NULL
|
387 |
+
})
|
388 |
+
on.exit({print("envent reactive enc_ar -->"); flush.console()})
|
389 |
+
result
|
390 |
+
},
|
391 |
+
ignoreInit = T
|
392 |
+
)
|
393 |
+
|
394 |
+
# Encoder
|
395 |
+
enc <- eventReactive(
|
396 |
+
enc_ar(),
|
397 |
+
{
|
398 |
+
req(input$dataset, input$encoder)
|
399 |
+
print("--> eventReactive enc | load encoder ")
|
400 |
+
encoder_artifact <- enc_ar()
|
401 |
+
enc <- py_load_object(
|
402 |
+
file.path(
|
403 |
+
DEFAULT_PATH_WANDB_ARTIFACTS,
|
404 |
+
encoder_artifact$metadata$ref$hash
|
405 |
+
)
|
406 |
+
)
|
407 |
+
on.exit({print("eventReactive enc | load encoder -->"); flush.console()})
|
408 |
+
enc
|
409 |
+
})
|
410 |
+
|
411 |
+
|
412 |
+
|
413 |
+
embs <- reactive({
|
414 |
+
req(X(), enc_l <- enc())
|
415 |
+
print("--> reactive embs | get embeddings")
|
416 |
+
if (torch$cuda$is_available()){
|
417 |
+
print(paste0("CUDA devices: ", torch$cuda$device_count()))
|
418 |
+
} else {
|
419 |
+
print("CUDA NOT AVAILABLE")
|
420 |
+
}
|
421 |
+
t_embs_0 <- Sys.time()
|
422 |
+
print(
|
423 |
+
paste0(
|
424 |
+
"reactive embs | get embeddings | Just about to get embedings. Device number: ",
|
425 |
+
torch$cuda$current_device()
|
426 |
+
)
|
427 |
+
)
|
428 |
+
|
429 |
+
print("reactive embs | get embeddings | Get batch size and dataset")
|
430 |
+
|
431 |
+
dataset_logged_by <- enc_ar()$logged_by()
|
432 |
+
bs = dataset_logged_by$config$batch_size
|
433 |
+
stride = input$stride
|
434 |
+
|
435 |
+
print(paste0("reactive embs | get embeddings (set stride set batch size) | Stride ", input$stride, " | batch size: ", bs ))
|
436 |
+
enc_input = X()
|
437 |
+
#chunk_max = 10000000
|
438 |
+
#shape <- dim(enc_input)
|
439 |
+
#print(paste0("reactive embs | get embeddings (set stride set batch size) | enc_input shape: ", shape ))
|
440 |
+
#chunk_size_ = min(shape[1]*shape[2],chunk_max/(shape[1]*shape[2]))
|
441 |
+
#N = max(3200,floor(chunk_size_/32))
|
442 |
+
chunk_size = 10000000 #N*32
|
443 |
+
#print(paste0("reactive embs | get embeddings (set stride set batch size) | Chunk_size ", chunk_size, " | shape[1]*shape[2]: ", shape[1]*shape[2] ))
|
444 |
+
print(paste0("reactive embs | get embeddings (set stride set batch size) | Chunk_size ", chunk_size))
|
445 |
+
# python_string = paste0("
|
446 |
+
#import dvats.all
|
447 |
+
cpu_flag = ifelse(input$cpu_flag == "CPU", TRUE, FALSE)
|
448 |
+
result = dvats$get_enc_embs_set_stride_set_batch_size(
|
449 |
+
X = X(),
|
450 |
+
print_flag = TRUE,
|
451 |
+
enc_learn = enc_l,
|
452 |
+
stride = input$stride,
|
453 |
+
batch_size = bs,
|
454 |
+
cpu = cpu_flag,
|
455 |
+
print_flag = FALSE,
|
456 |
+
time_flag = TRUE,
|
457 |
+
chunk_size = chunk_size,
|
458 |
+
check_memory_usage = TRUE
|
459 |
+
)
|
460 |
+
|
461 |
+
#result <- system(python_string)
|
462 |
+
t_embs_1 <- Sys.time()
|
463 |
+
diff <- t_embs_1 - t_embs_0
|
464 |
+
diff_secs <- as.numeric(diff, units = "secs")
|
465 |
+
diff_mins <- as.numeric(diff, units = "mins")
|
466 |
+
print(paste0("get_enc_embs total time: ", diff_secs, " secs thus ", diff_mins, " mins"))
|
467 |
+
X <- NULL
|
468 |
+
gc(verbose=TRUE)
|
469 |
+
on.exit({print("reactive embs | get embeddings -->"); flush.console()})
|
470 |
+
result
|
471 |
+
})
|
472 |
+
|
473 |
+
prj_object_cpu <- reactive({
|
474 |
+
embs = req(embs(), input$dr_method)
|
475 |
+
embs = embs[complete.cases(embs),]
|
476 |
+
print("--> prj_object")
|
477 |
+
#print(embs) #--
|
478 |
+
#print(paste0("--> prj_object | UMAP params ", str(umap_params_)))
|
479 |
+
print("--> prj_object | UMAP params ")
|
480 |
+
|
481 |
+
res = switch( input$dr_method,
|
482 |
+
#### Comprobando parametros para saber por qué salen diferentes los embeddings
|
483 |
+
######### Comprobando los parámetros
|
484 |
+
#UMAP = dvats$get_UMAP_prjs(input_data = embs, cpu=F, n_neighbors = 15, min_dist = 0.1, random_state=as.integer(1234)),
|
485 |
+
UMAP = dvats$get_UMAP_prjs(
|
486 |
+
input_data = embs,
|
487 |
+
cpu = TRUE,
|
488 |
+
print_flag = TRUE,
|
489 |
+
n_neighbors = input$prj_n_neighbors,
|
490 |
+
min_dist = input$prj_min_dist,
|
491 |
+
random_state= as.integer(input$prj_random_state)
|
492 |
+
),
|
493 |
+
TSNE = dvats$get_TSNE_prjs(
|
494 |
+
X = embs,
|
495 |
+
cpu = TRUE,
|
496 |
+
random_state=as.integer(input$prj_random_state)
|
497 |
+
),
|
498 |
+
PCA = dvats$get_PCA_prjs(
|
499 |
+
X = embs,
|
500 |
+
cpu = TRUE,
|
501 |
+
random_state=as.integer(input$prj_random_state)
|
502 |
+
)
|
503 |
+
)
|
504 |
+
res = res %>% as.data.frame # TODO: This should be a matrix for improved efficiency
|
505 |
+
colnames(res) = c("xcoord", "ycoord")
|
506 |
+
on.exit({print(" prj_object -->"); flush.console()})
|
507 |
+
flush.console()
|
508 |
+
#browser()
|
509 |
+
res
|
510 |
+
})
|
511 |
+
|
512 |
+
prj_object <- reactive({
|
513 |
+
req(embs(), input$dr_method)
|
514 |
+
print("--> prj_object")
|
515 |
+
t_prj_0 = Sys.time()
|
516 |
+
embs = req(embs())
|
517 |
+
print("prj_object | Before complete cases ")
|
518 |
+
embs = embs[complete.cases(embs),]
|
519 |
+
#print(embs) #--
|
520 |
+
#print(paste0("--> prj_object | UMAP params ", str(umap_params_)))
|
521 |
+
print("prj_object | Before switch ")
|
522 |
+
|
523 |
+
cpu_flag = ifelse(input$cpu_flag == "CPU", TRUE, FALSE)
|
524 |
+
|
525 |
+
res = switch( input$dr_method,
|
526 |
+
#### Comprobando parametros para saber por qué salen diferentes los embeddings
|
527 |
+
######### Comprobando los parámetros
|
528 |
+
#UMAP = dvats$get_UMAP_prjs(input_data = embs, cpu=F, n_neighbors = 15, min_dist = 0.1, random_state=as.integer(1234)),
|
529 |
+
UMAP = dvats$get_UMAP_prjs(
|
530 |
+
input_data = embs,
|
531 |
+
cpu = cpu_flag,
|
532 |
+
print_flag = TRUE,
|
533 |
+
n_neighbors = input$prj_n_neighbors,
|
534 |
+
min_dist = input$prj_min_dist,
|
535 |
+
random_state= as.integer(input$prj_random_state)
|
536 |
+
),
|
537 |
+
TSNE = dvats$get_TSNE_prjs(
|
538 |
+
X = embs,
|
539 |
+
cpu=FALSE,
|
540 |
+
random_state=as.integer(input$prj_random_state)
|
541 |
+
),
|
542 |
+
PCA = dvats$get_PCA_prjs(
|
543 |
+
X = embs,
|
544 |
+
cpu=FALSE,
|
545 |
+
random_state=as.integer(input$prj_random_state)
|
546 |
+
)
|
547 |
+
)
|
548 |
+
res = res %>% as.data.frame # TODO: This should be a matrix for improved efficiency
|
549 |
+
colnames(res) = c("xcoord", "ycoord")
|
550 |
+
t_prj_1 = Sys.time()
|
551 |
+
on.exit({print(paste0(" prj_object | ", t_prj_1-t_prj_0, " seconds -->")); flush.console()})
|
552 |
+
flush.console()
|
553 |
+
res
|
554 |
+
})
|
555 |
+
|
556 |
+
|
557 |
+
|
558 |
+
# Load and filter TimeSeries object from wandb
|
559 |
+
tsdf <- reactive(
|
560 |
+
{
|
561 |
+
req(input$encoder, ts_ar())
|
562 |
+
ts_ar <- req(ts_ar())
|
563 |
+
print(paste0("--> Reactive tsdf | ts artifact ", ts_ar))
|
564 |
+
flush.console()
|
565 |
+
|
566 |
+
t_init <- Sys.time()
|
567 |
+
path = file.path(DEFAULT_PATH_WANDB_ARTIFACTS, ts_ar$metadata$TS$hash)
|
568 |
+
print(paste0("Reactive tsdf | Read feather ", path ))
|
569 |
+
flush.console()
|
570 |
+
df <- read_feather(path, as_data_frame = TRUE, mmap = TRUE) %>% rename('timeindex' = `__index_level_0__`)
|
571 |
+
t_end = Sys.time()
|
572 |
+
print(paste0("Reactive tsdf | Read feather | Execution time: ", t_end - t_init, " seconds"))
|
573 |
+
flush.console()
|
574 |
+
|
575 |
+
t_end = Sys.time()
|
576 |
+
on.exit({print(paste0("Reactive tsdf | Column to index | Execution time: ", t_end - t_init, " seconds"));flush.console()})
|
577 |
+
df
|
578 |
+
})
|
579 |
+
|
580 |
+
# Auxiliary object for the interaction ts->projections
|
581 |
+
tsidxs_per_embedding_idx <- reactive({
|
582 |
+
req(input$wlen != 0, input$stride != 0)
|
583 |
+
get_window_indices(1:nrow(isolate(projections())), w = input$wlen, s = input$stride)
|
584 |
+
})
|
585 |
+
|
586 |
+
# Filter the embedding points and calculate/show the clusters if conditions are met.
|
587 |
+
projections <- reactive({
|
588 |
+
print("--> Projections")
|
589 |
+
req(prj_object(), input$dr_method)
|
590 |
+
#prjs <- req(prj_object()) %>% slice(input$points_emb[[1]]:input$points_emb[[2]])
|
591 |
+
print("Projections | before prjs")
|
592 |
+
prjs <- prj_object()
|
593 |
+
req(input$dataset, input$encoder, input$wlen, input$stride)
|
594 |
+
print("Projections | before switch")
|
595 |
+
switch(clustering_options$selected,
|
596 |
+
precomputed_clusters = {
|
597 |
+
filename <- req(selected_clusters_labels_ar())$metadata$ref$hash
|
598 |
+
clusters_labels <- py_load_object(filename = file.path(DEFAULT_PATH_WANDB_ARTIFACTS, filename))
|
599 |
+
#prjs$cluster <- clusters_labels[input$points_emb[[1]]:input$points_emb[[2]]]
|
600 |
+
prjs$cluster <- clusters_labels
|
601 |
+
},
|
602 |
+
calculate_clusters = {
|
603 |
+
clusters = hdbscan$HDBSCAN(
|
604 |
+
min_cluster_size = as.integer(clusters_config$min_cluster_size_hdbscan),
|
605 |
+
min_samples = as.integer(clusters_config$min_samples_hdbscan),
|
606 |
+
cluster_selection_epsilon = clusters_config$cluster_selection_epsilon_hdbscan,
|
607 |
+
metric = clusters_config$metric_hdbscan
|
608 |
+
)$fit(prjs)
|
609 |
+
score = 0
|
610 |
+
unique_labels <- unique(clusters$labels_)
|
611 |
+
total_unique_labels <- length(unique_labels)
|
612 |
+
if(total_unique_labels > 1){
|
613 |
+
score = dvats$cluster_score(prjs, clusters$labels_, TRUE)
|
614 |
+
}
|
615 |
+
print(paste0("Projections | Score ", score))
|
616 |
+
if (score <= 0) {
|
617 |
+
print(paste0("Projections | Repeat projections with CPU because of low quality clusters | score ", score))
|
618 |
+
prjs <- prj_object_cpu()
|
619 |
+
clusters = hdbscan$HDBSCAN(
|
620 |
+
min_cluster_size = as.integer(clusters_config$min_cluster_size_hdbscan),
|
621 |
+
min_samples = as.integer(clusters_config$min_samples_hdbscan),
|
622 |
+
cluster_selection_epsilon = clusters_config$cluster_selection_epsilon_hdbscan,
|
623 |
+
metric = clusters_config$metric_hdbscan
|
624 |
+
)$fit(prjs)
|
625 |
+
score = 0
|
626 |
+
unique_labels <- unique(clusters$labels_)
|
627 |
+
total_unique_labels <- length(unique_labels)
|
628 |
+
if(total_unique_labels > 1){
|
629 |
+
score = dvats$cluster_score(prjs, clusters$labels_, TRUE)
|
630 |
+
}
|
631 |
+
print(paste0("Projections | Repeat projections with CPU because of low quality clusters | score ", score))
|
632 |
+
}
|
633 |
+
prjs$cluster <- clusters$labels_
|
634 |
+
|
635 |
+
|
636 |
+
})
|
637 |
+
|
638 |
+
on.exit({print("Projections -->"); flush.console()})
|
639 |
+
prjs
|
640 |
+
})
|
641 |
+
|
642 |
+
# Update the colour palette for the clusters
|
643 |
+
update_palette <- reactive({
|
644 |
+
prjs <- req(projections())
|
645 |
+
if ("cluster" %in% names(prjs)) {
|
646 |
+
unique_labels <- unique(prjs$cluster)
|
647 |
+
print(unique_labels)
|
648 |
+
## IF the value "-1" exists, assign the first element of mycolors to #000000, if not, assign the normal colorRampPalette
|
649 |
+
if (as.integer(-1) %in% unique_labels)
|
650 |
+
colour_palette <- append("#000000", colorRampPalette(brewer.pal(12,"Paired"))(length(unique_labels)-1))
|
651 |
+
else
|
652 |
+
colour_palette <- colorRampPalette(brewer.pal(12,"Paired"))(length(unique_labels))
|
653 |
+
}
|
654 |
+
else
|
655 |
+
colour_palette <- "red"
|
656 |
+
|
657 |
+
colour_palette
|
658 |
+
})
|
659 |
+
|
660 |
+
color_palete_window_plot <- colorRampPalette(
|
661 |
+
colors = c("blue", "green"),
|
662 |
+
space = "Lab" # Option used when colors do not represent a quantitative scale
|
663 |
+
)
|
664 |
+
|
665 |
+
start_date <- reactive({
|
666 |
+
isolate(tsdf())$timeindex[1]
|
667 |
+
})
|
668 |
+
|
669 |
+
end_date <- reactive({
|
670 |
+
end_date_id = 100000
|
671 |
+
end_date_id = min(end_date_id, nrow(isolate(tsdf())))
|
672 |
+
isolate(tsdf())$timeindex[end_date_id]
|
673 |
+
})
|
674 |
+
|
675 |
+
ts_plot_base <- reactive({
|
676 |
+
print("--> ts_plot_base")
|
677 |
+
on.exit({print("ts_plot_base -->"); flush.console()})
|
678 |
+
start_date =isolate(start_date())
|
679 |
+
end_date = isolate(end_date())
|
680 |
+
print(paste0("ts_plot_base | start_date: ", start_date, " end_date: ", end_date))
|
681 |
+
t_init <- Sys.time()
|
682 |
+
tsdf_ <- isolate(tsdf()) %>% select(ts_variables$selected, - "timeindex")
|
683 |
+
tsdf_xts <- xts(tsdf_, order.by = tsdf()$timeindex)
|
684 |
+
t_end <- Sys.time()
|
685 |
+
print(paste0("ts_plot_base | tsdf_xts time", t_end-t_init))
|
686 |
+
print(head(tsdf_xts))
|
687 |
+
print(tail(tsdf_xts))
|
688 |
+
ts_plt = dygraph(
|
689 |
+
tsdf_xts,
|
690 |
+
width="100%", height = "400px"
|
691 |
+
) %>%
|
692 |
+
dyRangeSelector(c(start_date, end_date)) %>%
|
693 |
+
dyHighlight(hideOnMouseOut = TRUE) %>%
|
694 |
+
dyOptions(labelsUTC = FALSE ) %>%
|
695 |
+
dyCrosshair(direction = "vertical")%>%
|
696 |
+
dyLegend(show = "follow", hideOnMouseOut = TRUE) %>%
|
697 |
+
dyUnzoom() %>%
|
698 |
+
dyHighlight(highlightSeriesOpts = list(strokeWidth = 3)) %>%
|
699 |
+
dyCSS(
|
700 |
+
textConnection(
|
701 |
+
".dygraph-legend > span { display: none; }
|
702 |
+
.dygraph-legend > span.highlight { display: inline; }"
|
703 |
+
)
|
704 |
+
)
|
705 |
+
|
706 |
+
})
|
707 |
+
|
708 |
+
embedding_ids <- reactive({
|
709 |
+
print("--> embedding idx")
|
710 |
+
on.exit(print("embedding idx -->"))
|
711 |
+
bp = brushedPoints(prj_object(), input$projections_brush, allRows = TRUE) #%>% debounce(miliseconds) #Wait 1 seconds: 1000
|
712 |
+
bp %>% rownames_to_column("index") %>% dplyr::filter(selected_ == TRUE) %>% pull(index) %>% as.integer
|
713 |
+
})
|
714 |
+
|
715 |
+
window_list <- reactive({
|
716 |
+
print("--> window_list")
|
717 |
+
on.exit(print("window_list -->"))
|
718 |
+
# Get the window indices
|
719 |
+
req(length(embedding_ids() > 0))
|
720 |
+
embedding_idxs = embedding_ids()
|
721 |
+
window_indices = get_window_indices(embedding_idxs, input$wlen, input$stride)
|
722 |
+
# Put all the indices in one list and remove duplicates
|
723 |
+
unlist_window_indices = unique(unlist(window_indices))
|
724 |
+
# Calculate a vector of differences to detect idx where a new window should be created
|
725 |
+
diff_vector <- diff(unlist_window_indices,1)
|
726 |
+
# Take indexes where the difference is greater than one (that represent a change of window)
|
727 |
+
idx_window_limits <- which(diff_vector!=1)
|
728 |
+
# Include the first and last index to have a whole set of indexes.
|
729 |
+
idx_window_limits <- c(1, idx_window_limits, length(unlist_window_indices))
|
730 |
+
# Create a reduced window list
|
731 |
+
reduced_window_list <- vector(mode = "list", length = length(idx_window_limits)-1)
|
732 |
+
# Populate the first element of the list with the idx of the first window.
|
733 |
+
reduced_window_list[[1]] <- c(unlist_window_indices[idx_window_limits[1]],
|
734 |
+
unlist_window_indices[idx_window_limits[1+1]])
|
735 |
+
# Populate the rest of the list
|
736 |
+
for (i in 2:(length(idx_window_limits)-1)){
|
737 |
+
reduced_window_list[[i]]<- c(
|
738 |
+
#unlist_window_indices[idx_window_limits[i]+1],
|
739 |
+
#unlist_window_indices[idx_window_limits[i+1]]
|
740 |
+
as.Date(isolate(tsdf())$timeindex[unlist_window_indices[idx_window_limits[i]+1]]),
|
741 |
+
as.Date(isolate(tsdf())$timeindex[unlist_window_indices[idx_window_limits[i+1]]])
|
742 |
+
)
|
743 |
+
}
|
744 |
+
reduced_window_list
|
745 |
+
})
|
746 |
+
|
747 |
+
|
748 |
+
# Generate the time series data for the dygraph
ts_plot <- reactive({
  print("--> ts_plot | Before req 1")
  on.exit({print("ts_plot -->"); flush.console()})

  req(tsdf(), ts_variables, input$wlen != 0, input$stride)

  ts_plt = ts_plot_base()

  print("ts_plot | bp")
  #milliseconds <- ifelse(nrow(tsdf()) > 1000000, 2000, 1000)

  #if (!is.data.frame(bp)) {bp = bp_}
  print("ts_plot | embedding idxs")
  embedding_idxs = embedding_ids()
  # Calculate windows if conditions are met (if embedding_idxs is != 0, at least 1 point is selected)
  print("ts_plot | Before if")
  if ((length(embedding_idxs) != 0) & isTRUE(input$plot_windows)) {
    reduced_window_list = req(window_list())
    print(paste0("ts_plot | reduced_window_list[1] = ", reduced_window_list[1]))
    start_indices = min(sapply(reduced_window_list, function(x) x[1]))
    end_indices = max(sapply(reduced_window_list, function(x) x[2]))

    view_size = end_indices - start_indices + 1
    max_size = 10000

    start_date = isolate(tsdf())$timeindex[start_indices]
    end_date = isolate(tsdf())$timeindex[end_indices]

    print(paste0("ts_plot | reduced_window_list (", start_date, ", ", end_date, ") view size ", view_size, " max size ", max_size))

    if (view_size > max_size) {
      end_date = isolate(tsdf())$timeindex[start_indices + max_size - 1]
      #range_color = "#FF0000" # Red
    }

    range_color = "#CCEBD6" # Original


    # Plot the windows
    count = 0
    for (ts_idxs in reduced_window_list) {
      count = count + 1
      start_event_date = isolate(tsdf())$timeindex[head(ts_idxs, 1)]
      end_event_date = isolate(tsdf())$timeindex[tail(ts_idxs, 1)]
      ts_plt <- ts_plt %>% dyShading(
        from = start_event_date,
        to = end_event_date,
        color = range_color
      )
      ts_plt <- ts_plt %>% dyRangeSelector(c(start_date, end_date))
      #%>% dyEvent(
      #  start_event_date,
      #  label = paste0("SW-", count),
      #  labelLoc = "bottom",
      #  strokePattern = "solid",
      #  color = range_color
      #) %>% dyEvent(
      #  end_event_date,
      #  label = paste0("SW-", count),
      #  labelLoc = "bottom",
      #  strokePattern = "solid",
      #  color = range_color
      #)

    }

    ts_plt <- ts_plt
    # NOTE: This code block allows you to plot all the shading at once.
    # The traditional method has to plot the dygraph n times
    # (n being the number of rectangles to plot). With the adjacent
    # code it is possible to plot the dygraph only once. Currently
    # it does not work well because there are inconsistencies between the
    # timezones of the time series and shiny (there is a two-hour shift;
    # the current plot method works well), which does not allow this
    # method to be used correctly. If that were fixed in the future
    # everything would work fine.
    # num_rects <- length(reduced_window_list)
    # rects_ini <- vector(mode = "list", length = num_rects)
    # rects_fin <- vector(mode = "list", length = num_rects)
    # for(i in 1:num_rects) {
    #   rects_ini[[i]] <- head(reduced_window_list[[i]], 1)
    #   rects_fin[[i]] <- tail(reduced_window_list[[i]], 1)
    # }
    # ts_plt <- vec_dyShading(ts_plt, rects_ini, rects_fin, "red", rownames(tsdf()))
  }

  ts_plt
})

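# Illustrative sketch (not part of the committed code): one way a vectorised
# shading helper such as the vec_dyShading() referenced in the NOTE above could
# be written, folding dyShading() over paired start/end vectors so the shaded
# dygraph is built in a single pass. The real helper lives elsewhere in the app
# and its signature may differ.
vec_dyShading_sketch <- function(dg, froms, tos, color = "#CCEBD6") {
  Reduce(
    function(g, i) dygraphs::dyShading(g, from = froms[[i]], to = tos[[i]], color = color),
    seq_along(froms),
    init = dg
  )
}
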
# Get projections plot name for saving
prjs_plot_name <- reactive({
  dataset_name <- basename(input$dataset)
  encoder_name <- basename(input$encoder)
  get_prjs_plot_name(dataset_name, encoder_name, clustering_options$selected, prjs_$cluster, prj_plot_id, input)
})

# Get time series plot name for saving
ts_plot_name <- reactive({
  dataset_name <- basename(input$dataset)
  encoder_name <- basename(input$encoder)
  get_ts_plot_name(dataset_name, encoder_name, prj_plot_id, input)
})

#############
#  OUTPUTS  #
#############

output$windows_plot <- renderPlot({
  req(length(embedding_ids()) > 0)
  reduced_window_list = req(window_list())

  # Convert the window limits to POSIXct dates
  reduced_window_df <- do.call(rbind, lapply(reduced_window_list, function(x) {
    data.frame(
      start = as.POSIXct(isolate(tsdf())$timeindex[x[1]], origin = "1970-01-01"),
      end = as.POSIXct(isolate(tsdf())$timeindex[x[2]], origin = "1970-01-01")
    )
  }))

  # Set the plot limits based on the data
  first_date = min(reduced_window_df$start)
  last_date = max(reduced_window_df$end)

  left = as.POSIXct(isolate(tsdf())$timeindex[1], origin = "1970-01-01")
  right = as.POSIXct(isolate(tsdf())$timeindex[nrow(isolate(tsdf()))], origin = "1970-01-01")

  # Base plot configuration
  par(mar = c(5, 4, 4, 0) + 0.1) # bottom, left, top, right
  plt <- plot(
    NA,
    xlim = c(left, right),
    ylim = c(0, 1),
    type = "n",
    xaxt = "n", yaxt = "n",
    xlab = "", ylab = "",
    bty = "n")
  f = "%F %H:%M:%S"
  axis(1, at = as.numeric(c(left, right)), labels = c(format(first_date, f), format(last_date, f)), cex.axis = 0.7)

  # Add vertical lines at the window limits
  colors = color_palete_window_plot(2)
  abline(
    v = as.numeric(reduced_window_df$start),
    col = rep(colors, length.out = nrow(reduced_window_df)),
    lwd = 1
  )
  abline(
    v = as.numeric(reduced_window_df$end),
    col = rep(colors, length.out = nrow(reduced_window_df)),
    lwd = 1
  )
  segments(
    x0 = as.numeric(reduced_window_df$start),
    x1 = as.numeric(reduced_window_df$end),
    y0 = 0,
    y1 = 0,
    col = rep(colors, length.out = nrow(reduced_window_df)),
    lwd = 1
  )
  text(
    x = as.numeric(reduced_window_df$start),
    y = 0,
    srt = 90,
    adj = c(1, 0.5),
    labels = paste0("SW-", seq_len(nrow(reduced_window_df)), format(reduced_window_df$start, f)),
    cex = 1,
    xpd = TRUE,
    col = rep(colors, length.out = nrow(reduced_window_df))
  )

  points(x = as.numeric(left), y = 0, col = "black", pch = 20, cex = 1)
  points(x = as.numeric(right), y = 0, col = "black", pch = 20, cex = 1)
  plt
},
height = 200
)

output$windows_text <- renderUI({
  req(length(embedding_ids()) > 0)
  reduced_window_list = req(window_list())

  # Create a set of text labels with information about each window
  window_info <- lapply(1:length(reduced_window_list), function(i) {
    window <- reduced_window_list[[i]]
    start <- format(as.POSIXct(isolate(tsdf())$timeindex[window[1]], origin = "1970-01-01"), "%b %d")
    end <- format(as.POSIXct(isolate(tsdf())$timeindex[window[2]], origin = "1970-01-01"), "%b %d")
    color <- ifelse(i %% 2 == 0, "green", "blue")
    HTML(paste0("<div style='color: ", color, "'>Window ", i, ": ", start, " - ", end, "</div>"))
  })

  # Return all the text elements as a list of HTML
  do.call(tagList, window_info)
})

# Generate encoder info table
output$enc_info = renderDataTable({
  selected_encoder_name <- req(input$encoder)
  on.exit({print("Encoder artifact -->"); flush.console()})
  print(paste0("--> Encoder artifact ", selected_encoder_name))
  selected_encoder <- encs_l[[selected_encoder_name]]
  encoder_metadata <- req(selected_encoder$metadata)
  print(paste0("Encoder artifact | encoder metadata ", selected_encoder_name))
  encoder_metadata %>% enframe()
})

# Generate time series info table
output$ts_ar_info = renderDataTable({
  ts_ar_config() %>% enframe()
})


# Generate projections plot
output$projections_plot <- renderPlot({
  req(input$dataset, input$encoder, input$wlen != 0, input$stride != 0)
  print("--> Projections_plot")
  prjs_ <- req(projections())
  print("projections_plot | Prepare column highlights")
  # Prepare the column highlight to color the data
  if (!is.null(input$ts_plot_dygraph_click)) {
    selected_ts_idx = which(ts_plot()$x$data[[1]] == input$ts_plot_dygraph_click$x_closest_point)
    projections_idxs = tsidxs_per_embedding_idx() %>% map_lgl(~ selected_ts_idx %in% .)
    prjs_$highlight = projections_idxs
  } else {
    prjs_$highlight = FALSE
  }
  # Prepare the column cluster to color the data. If input$generate_cluster has not been clicked,
  # the column cluster will not exist in the dataframe, so we create it with the value FALSE
  if (!("cluster" %in% names(prjs_)))
    prjs_$cluster = FALSE
  print("projections_plot | GoGo Plot!")
  plt <- ggplot(data = prjs_) +
    aes(x = xcoord, y = ycoord, fill = highlight, color = as.factor(cluster)) +
    scale_colour_manual(name = "clusters", values = req(update_palette())) +
    geom_point(shape = 21, alpha = config_style$point_alpha, size = config_style$point_size) +
    scale_shape(solid = FALSE) +
    #geom_path(size=config_style$path_line_size, colour = "#2F3B65",alpha = config_style$path_alpha) +
    guides() +
    scale_fill_manual(values = c("TRUE" = "green", "FALSE" = "NA")) +
    coord_cartesian(xlim = ranges$x, ylim = ranges$y, expand = TRUE) +
    theme_void() +
    theme(legend.position = "none")

  if (input$show_lines) {
    #plt <- plt + geom_path(size=config_style$path_line_size, colour = "#2F3B65",alpha = config_style$path_alpha)
    plt <- plt + geom_path(linewidth = config_style$path_line_size, colour = "#2F3B65", alpha = config_style$path_alpha)
  }

  observeEvent(input$savePlot, {
    plt <- plt + theme(plot.background = element_rect(fill = "white"))
    ggsave(filename = prjs_plot_name(), plot = plt, path = "../data/plots/")
  })
  #observeEvent(c(input$dataset, input$encoder, clustering_options$selected), {
  #  req(input$dataset, input$encoder)
  #  print("!-- CUDA?: ", torch$cuda$is_available())
  #  prjs_ <- req(projections())
  #  filename <- prjs_plot_name()
  #  print(paste("saving embedding plot to ", filename))
  #  ggsave(filename = filename, plot = plt, path = "../data/plots/")
  #  print("Embedding plot saved")
  #})

  plt
})

|
1017 |
+
output$projections_plot_ui <- renderUI(
|
1018 |
+
{
|
1019 |
+
plotOutput(
|
1020 |
+
"projections_plot",
|
1021 |
+
click = "projections_click",
|
1022 |
+
brush = "projections_brush",
|
1023 |
+
height = input$embedding_plot_height
|
1024 |
+
) %>% withSpinner()
|
1025 |
+
}
|
1026 |
+
)
|
1027 |
+
|
1028 |
+
# Render information about the selected point in the time series graph
output$point <- renderText({
  req(input$ts_plot_dygraph_click$x_closest_point)
  ts_idx = which(ts_plot()$x$data[[1]] == input$ts_plot_dygraph_click$x_closest_point)
  paste0('X = ', strftime(req(input$ts_plot_dygraph_click$x_closest_point), "%F %H:%M:%S"),
         '; Y = ', req(input$ts_plot_dygraph_click$y_closest_point),
         '; X (raw) = ', req(input$ts_plot_dygraph_click$x_closest_point))
})

# Render information about the selected point and brush in the projections graph
output$projections_plot_interaction_info <- renderText({
  xy_str <- function(e) {
    if (is.null(e)) return("NULL\n")
    paste0("x=", round(e$x, 1), " y=", round(e$y, 1), "\n")
  }
  xy_range_str <- function(e) {
    if (is.null(e)) return("NULL\n")
    paste0("xmin=", round(e$xmin, 1), " xmax=", round(e$xmax, 1),
           " ymin=", round(e$ymin, 1), " ymax=", round(e$ymax, 1))
  }
  paste0(
    "click: ", xy_str(input$projections_click),
    "brush: ", xy_range_str(input$projections_brush)
  )
})

# Generate time series plot
output$ts_plot_dygraph <- renderDygraph(
  {
    req(
      input$dataset,
      input$encoder,
      input$wlen != 0,
      input$stride != 0
    )
    #print("Saving time series plot")
    ts_plot <- req(ts_plot())
    #save_path <- file.path("..", "data", "plots", ts_plot_name())
    #htmlwidgets::saveWidget(ts_plot, file = save_path, selfcontained = TRUE)
    #print(paste0("Time series plot saved to ", save_path))
    ts_plot
    #req(ts_plot())
  }
)


prjs_plot_name <- reactive({
  dataset_name <- basename(input$dataset)
  encoder_name <- basename(input$encoder)
  get_prjs_plot_name(dataset_name, encoder_name, clustering_options$selected, prjs_$cluster)
})

ts_plot_name <- reactive({
  dataset_name <- basename(input$dataset)
  encoder_name <- basename(input$encoder)
  get_ts_plot_name(dataset_name, encoder_name)
})

})

r_shiny_app/ui.R
ADDED
@@ -0,0 +1,179 @@
#
# This is the user-interface definition of a Shiny web application. You can
# run the application by clicking 'Run App' above.
#
# Find out more about building applications with Shiny here:
#
#    http://shiny.rstudio.com/
#

shinyUI(fluidPage(
  #theme = shinythemes::shinytheme("cerulean"),
  # Application title
  titlePanel("DeepVATS"),

  # Load Shinyjs
  shinyjs::useShinyjs(),

  # Sidebar with the dataset/encoder selectors and the projection and clustering controls
  sidebarLayout(
    sidebarPanel(
      fluidRow(
        shiny::actionButton("load_dataset", label = "Load dataset", icon = icon("database")),
        shiny::actionButton("load_embs", label = "Load embeddings", icon = icon("project-diagram"))
      ),
      hr(),
      selectizeInput("dataset", label = "Dataset", choices = NULL),
      selectizeInput("encoder", label = "Encoder", choices = NULL),
      #selectizeInput("embs_ar", label = "Select embeddings", choices = names(embs_l)),
      br(),
      sliderInput("wlen", "Select window size", min = 0, max = 0, value = 0, step = 1),
      sliderInput("stride", "Select stride", min = 0, max = 0, value = 0, step = 1),
      # sliderInput("points_emb", "Select range of points to plot in the projections",
      #             min = 0, max = 0, value = 0, step = 1, ticks = FALSE),
      #uiOutput("points_prj_controls"),
      #### TODO: Check. Added for debugging solar 4_secs
      sliderInput("prj_n_neighbors", "Projections n_neighbors:", min = 1, max = 50, value = 15),
      sliderInput("prj_min_dist", "Projections min_dist:", min = 0.0001, max = 1, value = 0.1),
      #sliderInput("prj_random_state", "Projections random_state:", min = 0, max = 2^32-1, value = 1234),
      sliderInput("prj_random_state", "Projections random_state:", min = 0, max = 2000, value = 1234),
      ################
      radioButtons("cpu_flag", "Use: ", c("GPU", "CPU"), selected = "GPU", inline = T),
      radioButtons("dr_method", "Projection method:", c("UMAP", "TSNE", "PCA"), selected = "UMAP", inline = T),
      br(),
      radioButtons("clustering_options", label = "Select a clustering option", selected = "no_clusters",
                   choices = c("No clusters" = "no_clusters",
                               #"Show precomputed clusters" = "precomputed_clusters",
                               "Calculate and show clusters" = "calculate_clusters")),
      # conditionalPanel(
      #   condition = "input.clustering_options == 'precomputed_clusters'",
      #   selectInput("clusters_labels_name", label = "Select a clusters_labels artifact", choices = NULL),
      #   tags$b("Selected 'clusters_labels' artifact description:"),
      #   textOutput("clusters_labels_ar_desc")
      # ),
      conditionalPanel(
        condition = "input.clustering_options == 'calculate_clusters'",
        selectInput("metric_hdbscan", label = "Metric", choices = DEFAULT_VALUES$metric_hdbscan),
        sliderInput("min_cluster_size_hdbscan", label = "min_cluster_size_hdbscan",
                    value = DEFAULT_VALUES$min_cluster_size_hdbscan, min = 0, max = 200, step = 1),
        sliderInput("min_samples_hdbscan", label = "min_samples_hdbscan",
                    value = DEFAULT_VALUES$min_samples_hdbscan, min = 0, max = 50, step = 1),
        sliderInput("cluster_selection_epsilon_hdbscan", label = "cluster_selection_epsilon",
                    value = DEFAULT_VALUES$cluster_selection_epsilon_hdbscan, min = 0, max = 5, step = 0.01),
        actionBttn(inputId = "calculate_clusters", label = "Calculate and show clusters", style = "bordered",
                   color = "primary", size = "sm", block = TRUE)
      )
    ),
    # Main panel with the embedding projections and the original time series
    mainPanel(
      tabsetPanel(
        id = "tabs",
        tabPanel(
          "Projections",
          fluidRow(
            h3("Embedding projections"),
            fluidRow(
              column(1,
                dropdownButton(
                  tags$b("Set height of the projections plot (px):"),
                  numericInput("embedding_plot_height", label = "Height", value = 400),
                  hr(),
                  tags$b("Configure aesthetics"),
                  sliderInput("path_line_size", label = "path_line_size",
                              value = DEFAULT_VALUES$path_line_size, min = 0, max = 5, step = 0.01),
                  sliderInput("path_alpha", label = "path_alpha",
                              value = DEFAULT_VALUES$path_alpha, min = 0, max = 1, step = 0.01),
                  sliderInput("point_alpha", label = "point_alpha",
                              value = DEFAULT_VALUES$point_alpha, min = 0, max = 1, step = 0.01),
                  sliderInput("point_size", label = "point_size",
                              value = DEFAULT_VALUES$point_size, min = 0, max = 10, step = 0.5),
                  checkboxInput("show_lines", "Show lines", value = TRUE),
                  actionButton('savePlot', 'Save embedding projections plot'),

                  actionBttn(inputId = "update_prj_graph", label = "Update aesthetics", style = "simple",
                             color = "primary", icon = icon("bar-chart"), size = "xs", block = TRUE),
                  circle = FALSE, status = "primary",
                  icon = icon("gear"), width = "300px", size = "xs",
                  tooltip = tooltipOptions(title = "Configure the embedding appearance"),
                  inputId = "projections_config"
                )
              ),
              column(8,
                prettyToggle(
                  inputId = "zoom_btn",
                  label_on = "Zoom out",
                  label_off = "Zoom in",
                  shape = "square",
                  outline = TRUE,
                  plain = TRUE,
                  inline = TRUE,
                  icon_on = icon("search-minus"),
                  icon_off = icon("search-plus"),
                  status_on = "danger",
                  status_off = "primary"
                ),
                materialSwitch(
                  inputId = "plot_windows",
                  label = "Plot windows",
                  status = "info",
                  value = TRUE,
                  inline = TRUE
                )
              ),
              column(3)
            ),
            fluidRow(
              uiOutput("projections_plot_ui")
            )
          ),
          fluidRow(h3("Original data")),
          fluidRow(
            dropdownButton(
              tags$b("Select/deselect variables"),
              tags$div(style = 'height:200px; overflow-y: scroll',
                       checkboxGroupInput(inputId = "select_variables",
                                          label = NULL, choices = NULL, selected = NULL)
              ),
              actionBttn(inputId = "selectall", label = "Select/Deselect all", style = "simple",
                         color = "primary", icon = icon("check-double"), size = "xs", block = TRUE),
              hr(),
              prettySwitch(inputId = "dygraph_sel", label = "Show stacked graphs (Not available yet)",
                           status = "success", fill = TRUE),
              circle = FALSE, status = "primary", size = "xs",
              icon = icon("gear"), width = "300px",
              tooltip = tooltipOptions(title = "Configure the TS appearance"),
              inputId = "ts_config"
            )
          ),
          fluidRow(
            column(12,
              #sliderInput(
              #  "nrows", "Select initial data range:",
              #  min = 0, max = 10000,
              #  value = c(0,0),
              #  step = 1000000
              #),
              dygraphOutput("ts_plot_dygraph") %>% withSpinner(),
              plotOutput("windows_plot"),
              uiOutput("windows_text")
            )
          )
          #verbatimTextOutput("projections_plot_interaction_info"),
          #verbatimTextOutput("point")

        ),
        tabPanel(
          "Information",
          fluidRow(
            h3("Time series"),
            dataTableOutput("ts_ar_info"),
            h3("Configuration of the associated encoder"),
            dataTableOutput("enc_info")
          )
        )
      )
    )
  )


))
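# Illustrative sketch (not part of the committed code): the "wlen" and "stride"
# sliders above are created with min = max = 0 and are expected to be populated
# server-side once an embeddings artifact is loaded. A minimal server fragment
# doing that could look as follows; the wlen_choices() reactive is hypothetical
# and the real update logic lives in r_shiny_app/server.R.
sketch_server <- function(input, output, session) {
  wlen_choices <- reactive(c(30, 60))  # hypothetical window lengths taken from the artifact
  observeEvent(wlen_choices(), {
    updateSliderInput(session, "wlen",
                      min = min(wlen_choices()), max = max(wlen_choices()),
                      value = min(wlen_choices()))
    updateSliderInput(session, "stride",
                      min = 1, max = max(wlen_choices()), value = 1)
  })
}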