477 changes: 477 additions & 0 deletions lmg/0401_gold163_all_soma_sort/gold163_consensus_features.nfb

Large diffs are not rendered by default.

65 changes: 65 additions & 0 deletions lmg/3DIQ_tosubset_v3.R
@@ -0,0 +1,65 @@
library(TTR)
library(EBImage)

load('shiny_app/subsetdata_v2.Rdata')
load('shiny_app/groupsdf.Rdata')
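# image IDs: the 8th '/'-separated component of each path in groupsdf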
ids <- sapply(strsplit(as.character(groupsdf$paths),'/'), "[", 8)

data <- my_data

data$Correlation <- NULL
data$FocusScore <- NA
data$LocalFocusScore <- NULL
data$MADIntensity <- NA
data$MaxIntensity <- NA
data$MeanIntensity <- NA
data$MedianIntensity <- NA
data$MinIntensity <- NA
data$PercentMaximal <- NA
data$PercentMinimal <- NA
data$PowerLogLogSlope <- NULL
data$StdIntensity <- NA
data$ThresholdOtsu <- NA
data$TotalArea <- NULL
data$TotalIntensity <- NULL
data$SNR_mean <- NA
data$CNR_mean <- NA
data$SNR_otsu <- NA
data$CNR_otsu <- NA

for(i in unique(ids))
{
print(i)
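# read this image's precomputed ImageQuality CSV and copy its metrics into the matching rows of data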
# if(is.na(groupsdf$paths[ids==i])==FALSE){
IQ <- read.csv(paste0("../BigNeurongit/Data/gold163_IQ/",i,"/",i,".v3dpbd.ImageQuality.csv"),header=T)
# try(data$Correlation[ids==i] <- mean(as.numeric(unlist(acf(as.numeric(swc$V12),plot=F)[4:8]))[1:5]))
data$FocusScore[ids==i] <- IQ$FocusScore
# try(data$LocalFocusScore[ids==i] <- mean(runSD(as.numeric(swc$V12))/runMean(as.numeric(swc$V12)),na.rm=T))
data$MADIntensity[ids==i] <- IQ$MADIntensity
data$MaxIntensity[ids==i] <- IQ$MaxIntensity
data$MeanIntensity[ids==i] <- IQ$MeanIntensity
data$MedianIntensity[ids==i] <- IQ$MedianIntensity
data$MinIntensity[ids==i] <- IQ$MinIntensity
data$PercentMaximal[ids==i] <- IQ$PercentMaximal
data$PercentMinimal[ids==i] <- IQ$PercentMinimal
# try(spect <- spectrum(as.numeric(swc$V12,na.rm=T),plot=F))
# logm1 <- lm(log(spect$spec) ~ log(spect$freq))
# data$PowerLogLogSlope[ids==i] <- as.numeric(coef(logm1)[2])
data$StdIntensity[ids==i] <- IQ$StdIntensity
data$SNR_mean[ids==i] <- IQ$SNR_mean
data$CNR_mean[ids==i] <- IQ$CNR_mean
data$ThresholdOtsu[ids==i] <- IQ$ThresholdOtsu
data$SNR_otsu[ids==i] <- IQ$SNR_otsu
data$CNR_otsu[ids==i] <- IQ$CNR_otsu
# data$TotalArea[ids==i] <- length(as.numeric(swc$V12))
# data$TotalIntensity[ids==i] <- sum(as.numeric(swc$V12))
# }
}

sum(complete.cases(data))

my_data <- do.call(data.frame, # Replace Inf in data by NA
lapply(data,
function(x) replace(x, is.infinite(x), NA)))

save(my_data,file='shiny_app/subsetdata_v2_3d.Rdata')
165 changes: 165 additions & 0 deletions lmg/3D_image_features.csv

Large diffs are not rendered by default.

75 changes: 75 additions & 0 deletions lmg/3D_int_dists.R
@@ -0,0 +1,75 @@
library(TTR)
library(EBImage)

load('shiny_app/subsetdata.Rdata')
load('shiny_app/groupsdf.Rdata')
ids <- sapply(strsplit(as.character(groupsdf$paths),'/'), "[", 8)

data <- my_data

data$Correlation <- NULL
data$FocusScore <- NA
data$LocalFocusScore <- NULL
data$MADIntensity <- NA
data$MaxIntensity <- NA
data$MeanIntensity <- NA
data$SNR_mean <- NA
data$CNR_mean <- NA
data$MedianIntensity <- NA
data$MinIntensity <- NA
data$PercentMaximal <- NA
data$PercentMinimal <- NA
data$PowerLogLogSlope <- NULL
data$StdIntensity <- NA
data$ThresholdOtsu <- NA
data$SNR_otsu <- NA
data$CNR_otsu <- NA
data$TotalArea <- NULL
data$TotalIntensity <- NULL

for(i in unique(ids))
{
print(i)
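# read the 3D volume and compute the quality features directly from its voxel intensities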
im <- readImage(paste0("../BigNeurongit/Data/gold166_wids_vols_tiff/",i,"/",i,".v3dpbd.tiff"))
# swc <- read.csv2(paste0(gsub("gold_163_all_soma_sort_s1","gold_163_all_soma_sort_s1_int",groupsdf$paths[ids==i]),'.missing_branches_int.eswc'),sep=' ',header=F)
# try(data$Correlation[ids==i] <- mean(as.numeric(unlist(acf(im,plot=F)[4:8]))[1:5]))
# "Correlation."
# multiplied = np.dot(self.levels[:, np.newaxis] + 1, self.levels[np.newaxis] + 1)
# repeated = np.tile(multiplied[np.newaxis], (self.nobjects, 1, 1))
# summed = (repeated * self.P).sum(2).sum(1)
# h3 = (summed - self.mux * self.muy) / (self.sigmax * self.sigmay)
# h3[np.isinf(h3)] = 0
data$FocusScore[ids==i] <- sd(im)/mean(im)
# try(data$LocalFocusScore[ids==i] <- mean(runSD(im)/runMean(im),na.rm=T))
data$MADIntensity[ids==i] <- mad(im)
im_max <- max(im)
data$MaxIntensity[ids==i] <- im_max
im_mean <- mean(im)
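# noise estimate: SD of the voxels below the mean intensity; SNR and CNR are taken relative to it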
noise_sd <- sd(im[im < im_mean])
data$MeanIntensity[ids==i] <- im_mean
data$SNR_mean[ids==i] <- mean(im[im > im_mean])/noise_sd
data$CNR_mean[ids==i] <- abs(im_max-im_mean)/noise_sd
data$MedianIntensity[ids==i] <- median(im)
data$MinIntensity[ids==i] <- min(im)
data$PercentMaximal[ids==i] <- sum(im==max(im))/length(im)
data$PercentMinimal[ids==i] <- sum(im==min(im))/length(im)
# spect <- spectrum(im,plot=F)
# logm1 <- lm(log(spect$spec) ~ log(spect$freq))
# data$PowerLogLogSlope[ids==i] <- as.numeric(coef(logm1)[2])
data$StdIntensity[ids==i] <- sd(im)
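# Otsu-threshold variant: voxels above the threshold count as signal, the SD of voxels below it as noise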
im_otsu <- otsu(as.Image(as.numeric(im)))
otsu_noise_sd <- sd(im[im < im_otsu])
data$ThresholdOtsu[ids==i] <- im_otsu
data$SNR_otsu[ids==i] <- mean(im[im > im_otsu])/otsu_noise_sd
data$CNR_otsu[ids==i] <- abs(im_max-im_otsu)/otsu_noise_sd
# data$TotalArea[ids==i] <- length(im)
# data$TotalIntensity[ids==i] <- sum(im)
}

sum(complete.cases(data))

my_data <- do.call(data.frame, # Replace Inf in data by NA
lapply(data,
function(x) replace(x, is.infinite(x), NA)))

save(my_data,file='shiny_app/subsetdata3D.Rdata')
88 changes: 88 additions & 0 deletions lmg/3D_int_dists_Hanbo.R
@@ -0,0 +1,88 @@
# library(TTR)
# library(EBImage)
library(autothresholdr)
library(bioimagetools)

folder <- "../BigNeurongit/Data/gold166_wids_vols_tiff"
ids <- list.dirs(folder, full.names = F, recursive = F)

data <- data.frame(ids=ids)

data$FocusScore <- NA
data$MADIntensity <- NA
data$MaxIntensity <- NA
data$MeanIntensity <- NA
data$SNR_mean <- NA
data$CNR_mean <- NA
data$MedianIntensity <- NA
data$MinIntensity <- NA
data$PercentMaximal <- NA
data$PercentMinimal <- NA
data$StdIntensity <- NA
data$ThresholdOtsu <- NA
data$SNR_otsu <- NA
data$CNR_otsu <- NA

for(i in unique(ids))
{
print(i)
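# read the volume as a single-channel TIFF and flatten it to an integer vector; the repeated gc() calls help limit memory use on large stacks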
# im <- readImage(paste0(folder,"/",i,"/",i,".v3dpbd.tiff"))
im <- readTIF(paste0(folder,"/",i,"/",i,".v3dpbd.tiff"), as.is=T, channels=1)
gc()
im <- as.integer(im)
gc()
# im <- as.numeric(im)*256
# gc()
# im <- as.integer(im)
# gc()
# swc <- read.csv2(paste0(gsub("gold_163_all_soma_sort_s1","gold_163_all_soma_sort_s1_int",groupsdf$paths[ids==i]),'.missing_branches_int.eswc'),sep=' ',header=F)
# try(data$Correlation[ids==i] <- mean(as.numeric(unlist(acf(im,plot=F)[4:8]))[1:5]))
# "Correlation."
# multiplied = np.dot(self.levels[:, np.newaxis] + 1, self.levels[np.newaxis] + 1)
# repeated = np.tile(multiplied[np.newaxis], (self.nobjects, 1, 1))
# summed = (repeated * self.P).sum(2).sum(1)
# h3 = (summed - self.mux * self.muy) / (self.sigmax * self.sigmay)
# h3[np.isinf(h3)] = 0
# try(data$LocalFocusScore[ids==i] <- mean(runSD(im)/runMean(im),na.rm=T))
im_sd <- sd(im)
data$StdIntensity[ids==i] <- im_sd
gc()
data$MADIntensity[ids==i] <- mad(im)
im_max <- max(im)
data$MaxIntensity[ids==i] <- im_max
im_mean <- mean(im)
noise_sd <- sd(im[im < im_mean])
gc()
data$MeanIntensity[ids==i] <- im_mean
data$FocusScore[ids==i] <- im_sd^2/im_mean
gc()
data$SNR_mean[ids==i] <- mean(im[im > im_mean])/noise_sd
data$CNR_mean[ids==i] <- abs(im_max-im_mean)/noise_sd
data$MedianIntensity[ids==i] <- median(im)
data$MinIntensity[ids==i] <- min(im)
data$PercentMaximal[ids==i] <- sum(im==max(im))/length(im)
data$PercentMinimal[ids==i] <- sum(im==min(im))/length(im)
# spect <- spectrum(im,plot=F)
# logm1 <- lm(log(spect$spec) ~ log(spect$freq))
# data$PowerLogLogSlope[ids==i] <- as.numeric(coef(logm1)[2])
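# Otsu threshold from autothresholdr; SNR/CNR again use above-threshold voxels as signal and the SD of below-threshold voxels as noise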
im_otsu <- auto_thresh(im,"Otsu")
gc()
# im_otsu <- otsu(im)
otsu_noise_sd <- sd(im[im < im_otsu[1]])
gc()
data$ThresholdOtsu[ids==i] <- im_otsu[1]
data$SNR_otsu[ids==i] <- mean(im[im > im_otsu[1]])/otsu_noise_sd
data$CNR_otsu[ids==i] <- abs(im_max-im_otsu[1])/otsu_noise_sd
# data$TotalArea[ids==i] <- length(im)
# data$TotalIntensity[ids==i] <- sum(im)
rm(im)
gc()
}

# sum(complete.cases(data))

my_data <- do.call(data.frame, # Replace Inf in data by NA
lapply(data,
function(x) replace(x, is.infinite(x), NA)))

save(my_data,file='3D_image_features.Rdata')
write.csv(my_data,file='3D_image_features.csv')
88 changes: 88 additions & 0 deletions lmg/3D_int_dists_standalone.R
@@ -0,0 +1,88 @@
# library(TTR)
# library(EBImage)
library(autothresholdr)
library(bioimagetools)

folder <- "../BigNeurongit/Data/gold166_wids_vols_tiff"
ids <- list.dirs(folder, full.names = F, recursive = F)

data <- data.frame(ids=ids)

data$FocusScore <- NA
data$MADIntensity <- NA
data$MaxIntensity <- NA
data$MeanIntensity <- NA
data$SNR_mean <- NA
data$CNR_mean <- NA
data$MedianIntensity <- NA
data$MinIntensity <- NA
data$PercentMaximal <- NA
data$PercentMinimal <- NA
data$StdIntensity <- NA
data$ThresholdOtsu <- NA
data$SNR_otsu <- NA
data$CNR_otsu <- NA

for(i in unique(ids))
{
print(i)
# im <- readImage(paste0(folder,"/",i,"/",i,".v3dpbd.tiff"))
im <- readTIF(paste0(folder,"/",i,"/",i,".v3dpbd.tiff"), as.is=T, channels=1)
gc()
im <- as.integer(im)
gc()
# im <- as.numeric(im)*256
# gc()
# im <- as.integer(im)
# gc()
# swc <- read.csv2(paste0(gsub("gold_163_all_soma_sort_s1","gold_163_all_soma_sort_s1_int",groupsdf$paths[ids==i]),'.missing_branches_int.eswc'),sep=' ',header=F)
# try(data$Correlation[ids==i] <- mean(as.numeric(unlist(acf(im,plot=F)[4:8]))[1:5]))
# "Correlation."
# multiplied = np.dot(self.levels[:, np.newaxis] + 1, self.levels[np.newaxis] + 1)
# repeated = np.tile(multiplied[np.newaxis], (self.nobjects, 1, 1))
# summed = (repeated * self.P).sum(2).sum(1)
# h3 = (summed - self.mux * self.muy) / (self.sigmax * self.sigmay)
# h3[np.isinf(h3)] = 0
# try(data$LocalFocusScore[ids==i] <- mean(runSD(im)/runMean(im),na.rm=T))
im_sd <- sd(im)
data$StdIntensity[ids==i] <- im_sd
gc()
data$MADIntensity[ids==i] <- mad(im)
im_max <- max(im)
data$MaxIntensity[ids==i] <- im_max
im_mean <- mean(im)
noise_sd <- sd(im[im < im_mean])
gc()
data$MeanIntensity[ids==i] <- im_mean
data$FocusScore[ids==i] <- im_sd^2/im_mean
gc()
data$SNR_mean[ids==i] <- mean(im[im > im_mean])/noise_sd
data$CNR_mean[ids==i] <- abs(im_max-im_mean)/noise_sd
data$MedianIntensity[ids==i] <- median(im)
data$MinIntensity[ids==i] <- min(im)
data$PercentMaximal[ids==i] <- sum(im==max(im))/length(im)
data$PercentMinimal[ids==i] <- sum(im==min(im))/length(im)
# spect <- spectrum(im,plot=F)
# logm1 <- lm(log(spect$spec) ~ log(spect$freq))
# data$PowerLogLogSlope[ids==i] <- as.numeric(coef(logm1)[2])
im_otsu <- auto_thresh(im,"Otsu")
gc()
# im_otsu <- otsu(im)
otsu_noise_sd <- sd(im[im < im_otsu[1]])
gc()
data$ThresholdOtsu[ids==i] <- im_otsu[1]
data$SNR_otsu[ids==i] <- mean(im[im > im_otsu[1]])/otsu_noise_sd
data$CNR_otsu[ids==i] <- abs(im_max-im_otsu[1])/otsu_noise_sd
# data$TotalArea[ids==i] <- length(im)
# data$TotalIntensity[ids==i] <- sum(im)
rm(im)
gc()
}

# sum(complete.cases(data))

my_data <- do.call(data.frame, # Replace Inf in data by NA
lapply(data,
function(x) replace(x, is.infinite(x), NA)))

save(my_data,file='3D_image_features.Rdata')
write.csv(my_data,file='3D_image_features.csv')