library(tip)
# ################## NOTE ##################
# Order 3 Tensor dimension: c(d1,d2,d3)
# d1: number of rows
# d2: number of columns
# d3: number of slices
############################################
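# A minimal base-R sketch of the dimension convention above: the first
# dimension indexes rows, the second columns, and the third slices.
# (example_array is an illustrative name and is not used elsewhere.)
example_array <- array(data = 0, dim = c(4, 5, 2))
dim(example_array) # c(4, 5, 2): 4 rows, 5 columns, 2 slices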
# Set a random seed for reproducibility
set.seed(007)
# A function to generate an order-3 tensor
generate_gaussian_tensor <- function(.tensor_dimension, .mean = 0, .sd = 1){
  array(data = c(rnorm(n = prod(.tensor_dimension),
                       mean = .mean,
                       sd = .sd)),
        dim = .tensor_dimension)
}
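# A quick check of the helper defined above (illustrative only; the small
# dimension c(2, 2, 2) is an assumption and is not used later).
dim(generate_gaussian_tensor(.tensor_dimension = c(2, 2, 2))) # expected: c(2, 2, 2)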
# Define the tensor dimension
tensor_dimension <- c(256,256,3)
# Generate the first cluster of tensors (mean 0)
c1 <- lapply(1:10, function(x) generate_gaussian_tensor(.tensor_dimension = tensor_dimension,
                                                        .mean = 0,
                                                        .sd = 1))
# Generate the second cluster of tensors (mean 5)
c2 <- lapply(1:10, function(x) generate_gaussian_tensor(.tensor_dimension = tensor_dimension,
                                                        .mean = 5,
                                                        .sd = 1))
# Generate the third cluster of tensors (mean -5)
c3 <- lapply(1:10, function(x) generate_gaussian_tensor(.tensor_dimension = tensor_dimension,
                                                        .mean = -5,
                                                        .sd = 1))
# Make a list of tensors
X <- c(c1, c2, c3)
# Compute the number of subjects for each cluster
n1 <- length(c1)
n2 <- length(c2)
n3 <- length(c3)
# Create a vector of true labels. True labels are only needed for
# constructing network graphs that display them; this is often useful
# for research.
<- c(rep("Cluster 1", n1),
true_labels rep("Cluster 2", n2),
rep("Cluster 3", n3))
# Compute the total number of subjects
n <- length(X)
# Construct the distance matrix
distance_matrix <- matrix(data = NA, nrow = n, ncol = n)
for(i in 1:n){
  for(j in i:n){
    distance_matrix[i,j] <- sqrt(sum((X[[i]] - X[[j]])^2))
    distance_matrix[j,i] <- distance_matrix[i,j]
  }
}
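# Optional sanity check (base R): the loop above computes the Frobenius
# distance between every pair of tensors, so the result should be a
# symmetric matrix with zeros on the diagonal.
isSymmetric(distance_matrix)    # expected: TRUE
all(diag(distance_matrix) == 0) # expected: TRUE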
# Compute the temperature parameter estimate
temperature <- 1/median(distance_matrix[upper.tri(distance_matrix)])
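# The temperature converts pairwise distances into similarities via an
# exponential kernel. The expression below is the same one passed inline as
# the .similarity_matrix argument to tip() further down; similarity_matrix is
# an illustrative name introduced here only to make that step explicit.
similarity_matrix <- exp(-1.0*temperature*distance_matrix)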
# For each subject, compute a point estimate for the number of similar
# subjects using univariate multiple change point detection
init_num_neighbors <- get_cpt_neighbors(.distance_matrix = distance_matrix)
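# A quick look at the point estimates (assuming get_cpt_neighbors() returns
# one integer per subject, as its use in tip() below suggests).
head(init_num_neighbors)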
# Set the number of burn-in iterations in the Gibbs sampler
# RECOMMENDATION: burn >= 1000
burn <- 1000
# Set the number of sampling iterations in the Gibbs sampler
# RECOMMENDATION: samples >= 1000
samples <- 1000
# Set the subject names
names_subjects <- paste(1:n)
# Run TIP clustering using only the prior
# --> That is, the likelihood function is constant
tip1 <- tip(.data = list(),
            .burn = burn,
            .samples = samples,
            .similarity_matrix = exp(-1.0*temperature*distance_matrix),
            .init_num_neighbors = init_num_neighbors,
            .likelihood_model = "CONSTANT",
            .subject_names = names_subjects,
            .num_cores = 1)
#> Bayesian Clustering: Table Invitation Prior Gibbs Sampler
#> burn-in: 1000
#> samples: 1000
#> Likelihood Model: CONSTANT
#>
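# The fitted object is an S4 object whose posterior similarity matrix slot is
# used below. The dimension check is an assumption about its shape (one row
# and column per subject), not package documentation.
dim(tip1@posterior_similarity_matrix) # expected: c(n, n), i.e., 30 x 30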
# Produce plots for the Bayesian Clustering Model
tip_plots <- plot(tip1)
# View the posterior distribution of the number of clusters
tip_plots$histogram_posterior_number_of_clusters
# View the trace plot with respect to the posterior number of clusters
tip_plots$trace_plot_posterior_number_of_clusters
# Extract posterior cluster assignments using the Posterior Expected Adjusted Rand (PEAR) index
cluster_assignments <- mcclust::maxpear(psm = tip1@posterior_similarity_matrix)$cl
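# If true labels are available, the Adjusted Rand Index is a convenient
# one-number summary of agreement between the estimated and true clusterings.
# mcclust::arandi() is used here as an illustrative choice; it is not part of
# the original workflow.
mcclust::arandi(cluster_assignments, as.integer(factor(true_labels)))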
# If the true labels are available, then show the cluster result via a contingency table
table(data.frame(true_label = true_labels,
cluster_assignment = cluster_assignments))
#> cluster_assignment
#> true_label 1 2 3 4
#> Cluster 1 7 3 0 0
#> Cluster 2 0 0 10 0
#> Cluster 3 0 0 0 10
# Create the one component graph with minimum entropy
partition_list <- partition_undirected_graph(.graph_matrix = tip1@posterior_similarity_matrix,
                                             .num_components = 1,
                                             .step_size = 0.001)
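# Optional: inspect how sparse the partitioned graph is, i.e., the fraction of
# nonzero entries that remain after partitioning. Only the
# partitioned_graph_matrix element already referenced below is assumed here.
mean(partition_list$partitioned_graph_matrix > 0)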
# Associate class labels and colors for the plot
<- c("Cluster 1" = "blue",
class_palette_colors "Cluster 2" = 'green',
"Cluster 3" = "red")
# Associate class labels and shapes for the plot
<- c("Cluster 1" = 19,
class_palette_shapes "Cluster 2" = 18,
"Cluster 3" = 17)
# Visualize the posterior similarity matrix by constructing a graph plot of
# the one-cluster graph. The true labels are used here (below they are not).
ggnet2_network_plot(.matrix_graph = partition_list$partitioned_graph_matrix,
.subject_names = NA,
.subject_class_names = true_labels,
.class_colors = class_palette_colors,
.class_shapes = class_palette_shapes,
.node_size = 2,
.add_node_labels = FALSE)
#> Warning: Duplicated override.aes is ignored.
# If true labels are not available, then construct a network plot
# of the one-cluster graph without any class labels.
# Note: Subject labels may be suppressed using .add_node_labels = FALSE.
ggnet2_network_plot(.matrix_graph = partition_list$partitioned_graph_matrix,
.subject_names = names_subjects,
.node_size = 2,
.add_node_labels = TRUE)