Skip to content

Commit

Permalink
Merge pull request #958 from wadpac/issue571_logIdleSleepModeActigraph
Browse files Browse the repository at this point in the history
Issue571 log idle sleep mode actigraph
  • Loading branch information
vincentvanhees authored Nov 5, 2023
2 parents 3a62d29 + 38bf6c7 commit 1f97ca5
Show file tree
Hide file tree
Showing 11 changed files with 133 additions and 70 deletions.
6 changes: 3 additions & 3 deletions NEWS.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,11 @@

- Part 2: Bug fix in csv report generation when appending part 2 milestone data with differing number of columns #951

- Part 6: Added as a new part
- Part 1: Bug fix in gt3x data reading by avoiding duplication of seconds when appending chunks of data #952

- Part 6: Expanded with Household co-analysis
- Part 1 + 2: Log information regarding number of time gaps and total time imputed with function g.imputeTimeGaps #571

- Part 6: Expanded with Cosinor analysis
- Part 6: Added as a new part; for now it includes only Household co-analysis and Cosinor analysis

# CHANGES IN GGIR VERSION 3.0-0

Expand Down
79 changes: 47 additions & 32 deletions R/g.analyse.R
Original file line number Diff line number Diff line change
Expand Up @@ -296,52 +296,67 @@ g.analyse = function(I, C, M, IMP, params_247 = c(), params_phyact = c(),
startt = startt)

if (!is.null(M$QClog)) {
# Summarise the QC log (currently only expected from cwa Axivity files)
# Summarise the QC log (currently only expected from cwa Axivity, actigraph, and csv files)
QCsummarise = function(QClog, wx) {
  # Summarise the total duration, in minutes, of the QClog rows indexed by wx.
  #
  # Args:
  #   QClog: data.frame with numeric columns start and end, assumed to be
  #          expressed in seconds (as produced by the file readers) —
  #          TODO confirm units against GGIRread::readAxivity output.
  #   wx: integer vector of row indices to summarise; may be empty.
  #
  # Returns:
  #   Total of (end - start) over the selected rows, converted to minutes;
  #   0 when wx is empty.
  #
  # Note: the original used ifelse() on a scalar condition, which is the
  # vectorized conditional (it evaluates both branches and can strip
  # attributes); a plain if/else is the idiomatic scalar form.
  if (length(wx) > 0) {
    x = sum(QClog$end[wx] - QClog$start[wx]) / 60
  } else {
    x = 0
  }
  return(x)
}
# total imputation
impdone = which(M$QClog$imputed == TRUE)
file_summary$Dur_imputed = QCsummarise(M$QClog, impdone)
file_summary$Nblocks_imputed = length(impdone)
if ("imputed" %in% colnames(M$QClog)) {
impdone = which(M$QClog$imputed == TRUE)
if (any(colnames(M$QClog) == "timegaps_min")) {
file_summary$Dur_imputed = sum(M$QClog$timegaps_min)
file_summary$Nblocks_imputed = sum(M$QClog$timegaps_n)
} else {
file_summary$Dur_imputed = QCsummarise(M$QClog, impdone)
file_summary$Nblocks_imputed = length(impdone)
}
}

# checksum
chsum_failed = which(M$QClog$checksum_pass == FALSE)
file_summary$Dur_chsum_failed = QCsummarise(M$QClog, chsum_failed)
file_summary$Nblocks_chsum_failed = length(chsum_failed)
if ("checksum_pass" %in% colnames(M$QClog)) {
chsum_failed = which(M$QClog$checksum_pass == FALSE)
file_summary$Dur_chsum_failed = QCsummarise(M$QClog, chsum_failed)
file_summary$Nblocks_chsum_failed = length(chsum_failed)

}

# nonincremental block ID
nonincremental = which(M$QClog$blockID_current - M$QClog$blockID_next != 1)
file_summary$Dur_nonincremental = QCsummarise(M$QClog, nonincremental)
file_summary$Nblocks_nonincremental = length(nonincremental)
if ("blockID_current" %in% colnames(M$QClog)) {
nonincremental = which(M$QClog$blockID_current - M$QClog$blockID_next != 1)
file_summary$Dur_nonincremental = QCsummarise(M$QClog, nonincremental)
file_summary$Nblocks_nonincremental = length(nonincremental)
}

# sampling frequency issues
freqBlockHead = M$QClog$frequency_blockheader
frequency_bias = abs(M$QClog$frequency_observed - freqBlockHead) / freqBlockHead

freqissue = which(frequency_bias >= 0.05 & frequency_bias < 0.1)
file_summary$Dur_freqissue_5_10 = QCsummarise(M$QClog, freqissue)
file_summary$Nblock_freqissue_5_10 = length(freqissue)

freqissue = which(frequency_bias >= 0.1 & frequency_bias < 0.2)
file_summary$Dur_freqissue_10_20 = QCsummarise(M$QClog, freqissue)
file_summary$Nblock_freqissue_10_20 = length(freqissue)

freqissue = which(frequency_bias >= 0.2 & frequency_bias < 0.3)
file_summary$Dur_freqissue_20_30 = QCsummarise(M$QClog, freqissue)
file_summary$Nblock_freqissue_20_30 = length(freqissue)

freqissue = which(frequency_bias >= 0.3)
file_summary$Dur_freqissue_30 = QCsummarise(M$QClog, freqissue)
file_summary$Nblock_freqissue_30 = length(freqissue)
if ("frequency_blockheader" %in% colnames(M$QClog)) {
freqBlockHead = M$QClog$frequency_blockheader
frequency_bias = abs(M$QClog$frequency_observed - freqBlockHead) / freqBlockHead
}
if ("frequency_bias" %in% colnames(M$QClog)) {
freqissue = which(frequency_bias >= 0.05 & frequency_bias < 0.1)
file_summary$Dur_freqissue_5_10 = QCsummarise(M$QClog, freqissue)
file_summary$Nblock_freqissue_5_10 = length(freqissue)

freqissue = which(frequency_bias >= 0.1 & frequency_bias < 0.2)
file_summary$Dur_freqissue_10_20 = QCsummarise(M$QClog, freqissue)
file_summary$Nblock_freqissue_10_20 = length(freqissue)

freqissue = which(frequency_bias >= 0.2 & frequency_bias < 0.3)
file_summary$Dur_freqissue_20_30 = QCsummarise(M$QClog, freqissue)
file_summary$Nblock_freqissue_20_30 = length(freqissue)

freqissue = which(frequency_bias >= 0.3)
file_summary$Dur_freqissue_30 = QCsummarise(M$QClog, freqissue)
file_summary$Nblock_freqissue_30 = length(freqissue)
}
}

metrics_nav = list(lookat = lookat,
colnames_to_lookat = colnames_to_lookat,
longitudinal_axis_id = longitudinal_axis_id)
colnames_to_lookat = colnames_to_lookat,
longitudinal_axis_id = longitudinal_axis_id)
output_perfile = g.analyse.perfile(I, C, metrics_nav,
AveAccAve24hr,
doquan, doiglevels, tooshort,
Expand All @@ -352,7 +367,7 @@ g.analyse = function(I, C, M, IMP, params_247 = c(), params_phyact = c(),
output_perday = output_perday,
dataqual_summary = dataqual_summary,
file_summary = file_summary)

filesummary = output_perfile$filesummary
daysummary = output_perfile$daysummary

Expand Down
40 changes: 25 additions & 15 deletions R/g.analyse.perfile.R
Original file line number Diff line number Diff line change
Expand Up @@ -64,8 +64,9 @@ g.analyse.perfile = function(I, C, metrics_nav,
s_names[vi:(vi + q0)] = c("calib_err",
"calib_status", colnames_to_lookat)
vi = vi + q0 + 2
# readAxivity QClog summary
if ("Dur_imputed" %in% names(file_summary)) {
# QClog summary
if ("Dur_chsum_failed" %in% names(file_summary)) {
# readAxivity QClog
# These are summaries of the file health check by the GGIRread::readAxivity
# the function handles data blocks (1-3 seconds) with faulty data by imputing
# them and logging the information.
Expand Down Expand Up @@ -101,6 +102,15 @@ g.analyse.perfile = function(I, C, metrics_nav,
"filehealth_fbias2030_N",
"filehealth_fbias30_N")
vi = vi + 7
} else if ("Dur_imputed" %in% names(file_summary)) {
# ActiGraph QClog
# This also logs time gaps in ActiGraph files, which correspond with periods
# in which the idle sleep mode has been activated
filesummary[vi:(vi + 1)] = c(file_summary$Dur_imputed, # total imputed
file_summary$Nblocks_imputed)
s_names[vi:(vi + 1)] = c("filehealth_totimp_min",
"filehealth_totimp_N")
vi = vi + 2
}

#quantile, ML5, and intensity gradient variables
Expand Down Expand Up @@ -166,20 +176,20 @@ g.analyse.perfile = function(I, C, metrics_nav,
"cosinor_acrotime", "cosinor_ndays", "cosinor_R2")
vi = vi + 6
try(expr = {filesummary[vi:(vi + 10)] = c(cosinor_coef$coefext$params$minimum,
cosinor_coef$coefext$params$amp,
cosinor_coef$coefext$params$alpha,
cosinor_coef$coefext$params$beta,
cosinor_coef$coefext$params$acrotime,
cosinor_coef$coefext$params$UpMesor,
cosinor_coef$coefext$params$DownMesor,
cosinor_coef$coefext$params$MESOR,
cosinor_coef$coefext$params$ndays,
cosinor_coef$coefext$params$F_pseudo,
cosinor_coef$coefext$params$R2)}, silent = TRUE)
cosinor_coef$coefext$params$amp,
cosinor_coef$coefext$params$alpha,
cosinor_coef$coefext$params$beta,
cosinor_coef$coefext$params$acrotime,
cosinor_coef$coefext$params$UpMesor,
cosinor_coef$coefext$params$DownMesor,
cosinor_coef$coefext$params$MESOR,
cosinor_coef$coefext$params$ndays,
cosinor_coef$coefext$params$F_pseudo,
cosinor_coef$coefext$params$R2)}, silent = TRUE)
s_names[vi:(vi + 10)] = c("cosinorExt_minimum", "cosinorExt_amp", "cosinorExt_alpha",
"cosinorExt_beta", "cosinorExt_acrotime", "cosinorExt_UpMesor",
"cosinorExt_DownMesor", "cosinorExt_MESOR",
"cosinorExt_ndays", "cosinorExt_F_pseudo", "cosinorExt_R2")
"cosinorExt_beta", "cosinorExt_acrotime", "cosinorExt_UpMesor",
"cosinorExt_DownMesor", "cosinorExt_MESOR",
"cosinorExt_ndays", "cosinorExt_F_pseudo", "cosinorExt_R2")
vi = vi + 11
filesummary[vi:(vi + 1)] = c(cosinor_coef$IVIS$InterdailyStability,
cosinor_coef$IVIS$IntradailyVariability)
Expand Down
2 changes: 1 addition & 1 deletion R/g.calibrate.R
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,7 @@ g.calibrate = function(datafile, params_rawdata = c(),
zeros = which(data[,1] == 0 & data[,2] == 0 & data[,3] == 0)
if ((mon == MONITOR$ACTIGRAPH && dformat == FORMAT$CSV) || length(zeros) > 0) {
data = g.imputeTimegaps(x = as.data.frame(data), xyzCol = 1:3, timeCol = c(), sf = sf, impute = FALSE)
data = as.matrix(data)
data = as.matrix(data$x)
}
LD = nrow(data)
#store data that could not be used for this block, but will be added to next block
Expand Down
4 changes: 4 additions & 0 deletions R/g.getmeta.R
Original file line number Diff line number Diff line change
Expand Up @@ -284,6 +284,8 @@ g.getmeta = function(datafile, params_metrics = c(), params_rawdata = c(),
PreviousLastValue = PreviousLastValue,
PreviousLastTime = PreviousLastTime,
epochsize = c(ws3, ws2))
QClog = rbind(QClog, P$QClog)
P = P$x
PreviousLastValue = as.numeric(P[nrow(P), xyzCol])
if (is.null(timeCol)) PreviousLastTime = NULL else PreviousLastTime = as.POSIXct(P[nrow(P), timeCol])
}
Expand Down Expand Up @@ -320,6 +322,8 @@ g.getmeta = function(datafile, params_metrics = c(), params_rawdata = c(),
PreviousLastValue = PreviousLastValue,
PreviousLastTime = PreviousLastTime,
epochsize = c(ws3, ws2))
QClog = rbind(QClog, P$QClog)
P = P$x
PreviousLastValue = as.numeric(P[nrow(P), c("X", "Y", "Z")])
PreviousLastTime = as.POSIXct(P[nrow(P), "time"])
}
Expand Down
17 changes: 16 additions & 1 deletion R/g.imputeTimegaps.R
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@ g.imputeTimegaps = function(x, xyzCol, timeCol = c(), sf, k=0.25, impute = TRUE,
}
# dummy variables to control the process
remove_time_at_end = dummyTime = FirstRowZeros = imputelast = FALSE
# initialize numberofgaps and GapsLength
NumberOfGaps = GapsLength = NULL
# add temporary timecolumn to enable timegap imputation where there are zeros
if (length(timeCol) == 1) {
if (!(timeCol %in% colnames(x))) dummyTime = TRUE
Expand Down Expand Up @@ -102,6 +104,7 @@ g.imputeTimegaps = function(x, xyzCol, timeCol = c(), sf, k=0.25, impute = TRUE,
if (NumberOfGaps > 0) {
x$gap = 1
x$gap[gapsi] = round(deltatime[gapsi] * sf) # as.integer was problematic many decimals close to wholenumbers (but not whole numbers) resulting in 1 row less than expected
GapsLength = sum(x$gap[gapsi]) - NumberOfGaps # - numberOfGaps because x$gap == 1 means no gap
# normalisation to 1 G
normalise = which(x$gap > 1)
for (i_normalise in normalise) {
Expand Down Expand Up @@ -183,8 +186,20 @@ g.imputeTimegaps = function(x, xyzCol, timeCol = c(), sf, k=0.25, impute = TRUE,
if (remove_time_at_end == TRUE) {
x = x[, grep(pattern = "time", x = colnames(x), invert = TRUE)]
}
# keep only timestamp column
if (all(c("time", "timestamp") %in% colnames(x))) {
x = x[, grep(pattern = "timestamp", x = colnames(x), invert = TRUE)]
}
return(x)
# QClog
start = as.numeric(as.POSIXct(x[1,1], origin = "1970-1-1"))
end = start + nrow(x)
if (is.null(GapsLength)) GapsLength = 0
if (is.null(NumberOfGaps)) NumberOfGaps = 0
imputed = NumberOfGaps > 0
QClog = data.frame(imputed = imputed,
start = start, end = end,
blockLengthSeconds = (end - start) / sf,
timegaps_n = NumberOfGaps, timegaps_min = GapsLength/sf/60)
# return data and QClog
return(list(x = x, QClog = QClog))
}
4 changes: 2 additions & 2 deletions R/g.readaccfile.R
Original file line number Diff line number Diff line change
Expand Up @@ -38,8 +38,8 @@ g.readaccfile = function(filename, blocksize, blocknumber, filequality,
# endpage and the blocksize.
if (blocknumber != 1 & length(PreviousEndPage) != 0) {
# if ((mon == MONITOR$GENEACTIV && dformat == FORMAT$BIN) || dformat == FORMAT$CSV) { # change this line as the csv data do not need to skip one more row (the skip argument in read.csv does not include this row of the dataset)
if (mon == MONITOR$GENEACTIV && dformat == FORMAT$BIN) {
# only in GENEActiv binary data and for csv format data
if ((mon == MONITOR$GENEACTIV && dformat == FORMAT$BIN) | dformat == FORMAT$GT3X) {
# only in GENEActiv binary data and for gt3x format data
# page selection is defined from start to end (including end)
startpage = PreviousEndPage + 1
} else {
Expand Down
1 change: 1 addition & 0 deletions R/read.myacc.csv.R
Original file line number Diff line number Diff line change
Expand Up @@ -311,6 +311,7 @@ read.myacc.csv = function(rmc.file=c(), rmc.nrow=Inf, rmc.skip=c(), rmc.dec=".",
P = g.imputeTimegaps(P, xyzCol = c("accx", "accy", "accz"), timeCol = "timestamp", sf = sf, k = 0.25,
PreviousLastValue = PreviousLastValue,
PreviousLastTime = PreviousLastTime, epochsize = NULL)
P = P$x
PreviousLastValue = as.numeric(P[nrow(P), c("accx", "accy", "accz")])
PreviousLastTime = as.POSIXct(P[nrow(P), "timestamp"])
}
Expand Down
5 changes: 4 additions & 1 deletion man/g.imputeTimegaps.Rd
Original file line number Diff line number Diff line change
Expand Up @@ -43,8 +43,11 @@
}
}
\value{
Data.frame based on input x with timegaps imputed (as default) or with
List including:
- x, data.frame based on input x with time gaps imputed (the default) or with
recordings that have 0 values in all three axes removed (if impute = FALSE)
- QClog, data.frame with information on the number of time gaps found and the
total time imputed, in minutes
}
\author{
Vincent T van Hees <v.vanhees@accelting.com>
Expand Down
19 changes: 19 additions & 0 deletions tests/testthat/test_imputeTimegaps.R
Original file line number Diff line number Diff line change
Expand Up @@ -17,19 +17,25 @@ test_that("timegaps are correctly imputed", {
# Format 1: with timestamp & with timegaps (no zeroes, incomplete dataset)
x1 = x[-zeros,]
x1_imputed = g.imputeTimegaps(x1, xyzCol, timeCol = "time", sf = sf, k = 2/sf, impute = TRUE, PreviousLastValue = c(0,0,1))
x1_imputed_QClog = x1_imputed$QClog; x1_imputed = x1_imputed$x
x1_removed = g.imputeTimegaps(x1, xyzCol, timeCol = "time", sf = sf, k = 2/sf, impute = FALSE, PreviousLastValue = c(0,0,1))
x1_removed_QClog = x1_removed$QClog; x1_removed = x1_removed$x

# Format 2: with timestamp & with zeros (complete dataset)
x2 = x
x2[zeros, xyzCol] = 0
x2_imputed = g.imputeTimegaps(x2, xyzCol, timeCol = "time", sf = sf, k = 2/sf, impute = TRUE, PreviousLastValue = c(0,0,1))
x2_imputed_QClog = x2_imputed$QClog; x2_imputed = x2_imputed$x
x2_removed = g.imputeTimegaps(x2, xyzCol, timeCol = "time", sf = sf, k = 2/sf, impute = FALSE, PreviousLastValue = c(0,0,1))
x2_removed_QClog = x2_removed$QClog; x2_removed = x2_removed$x

# Format 3: without timestamp & with zeros (complete dataset)
x3 = x_without_time
x3[zeros, xyzCol] = 0
x3_imputed = g.imputeTimegaps(x3, xyzCol, timeCol = "time", sf = sf, k = 2/sf, impute = TRUE, PreviousLastValue = c(0,0,1))
x3_imputed_QClog = x3_imputed$QClog; x3_imputed = x3_imputed$x
x3_removed = g.imputeTimegaps(x3, xyzCol, timeCol = "time", sf = sf, k = 2/sf, impute = FALSE, PreviousLastValue = c(0,0,1))
x3_removed_QClog = x3_removed$QClog; x3_removed = x3_removed$x

# tests number of rows
expect_equal(nrow(x1_imputed), N)
Expand All @@ -47,15 +53,26 @@ test_that("timegaps are correctly imputed", {
expect_equal(x1_removed$X, x2_removed$X)
expect_equal(x1_removed$X, x3_removed$X)

# test QClog
expect_equal(x1_imputed_QClog$timegaps_n, 4)
expect_equal(x2_imputed_QClog$timegaps_n, 4)
expect_equal(x3_imputed_QClog$timegaps_n, 4)

expect_equal(x1_imputed_QClog$timegaps_min, length(zeros)/sf/60)
expect_equal(x2_imputed_QClog$timegaps_min, length(zeros)/sf/60)
expect_equal(x3_imputed_QClog$timegaps_min, length(zeros)/sf/60)

# TEST IMPUTATION WHEN FIRST ROW IS NOT CONSECUTIVE TO PREVIOUS CHUNK ----
# Format 4: with timestamp & with timegaps (no zeroes, incomplete dataset)
x4 = x[-zeros,]
PreviousLastTime = x[1,"time"] - 30 # dummy gap of 30 seconds between chunks
suppressWarnings({ # warning arising from made up PreviousLastTime
x4_imputed = g.imputeTimegaps(x4, xyzCol, timeCol = "time", sf = sf, k = 2/sf, impute = TRUE,
PreviousLastValue = c(0,0,1), PreviousLastTime = PreviousLastTime)
x4_imputed_QClog = x4_imputed$QClog; x4_imputed = x4_imputed$x
x4_removed = g.imputeTimegaps(x4, xyzCol, timeCol = "time", sf = sf, k = 2/sf, impute = FALSE,
PreviousLastValue = c(0,0,1), PreviousLastTime = PreviousLastTime)
x4_removed_QClog = x4_removed$QClog; x4_removed = x4_removed$x
})

expect_equal(nrow(x4_imputed), N + sf*30)
Expand All @@ -67,7 +84,9 @@ test_that("timegaps are correctly imputed", {
x5 = x
x5[zeros, xyzCol] = 0
x5_imputed = g.imputeTimegaps(x5, xyzCol, timeCol = "time", sf = sf, k = 2/sf, impute = TRUE, PreviousLastValue = c(0,0,1))
x5_imputed_QClog = x5_imputed$QClog; x5_imputed = x5_imputed$x
x5_removed = g.imputeTimegaps(x5, xyzCol, timeCol = "time", sf = sf, k = 2/sf, impute = FALSE, PreviousLastValue = c(0,0,1))
x5_removed_QClog = x5_removed$QClog; x5_removed = x5_removed$x

expect_equal(nrow(x5_imputed), N)
expect_equal(nrow(x5_removed), N - length(zeros))
Expand Down
Loading

0 comments on commit 1f97ca5

Please sign in to comment.