Commit f08502c

List size for large data.tables
Fixes #6607
1 parent 08e94b7

File tree

2 files changed: 8 additions, 1 deletion

R/tables.R

Lines changed: 1 addition & 1 deletion
@@ -5,7 +5,7 @@ type_size = function(DT) {
   # for speed and ram efficiency, a lower bound by not descending into character string lengths or list items
   # if a more accurate and higher estimate is needed then user can pass object.size or alternative to mb=
   # in case number of columns is very large (e.g. 1e6 columns) then we use a for() to avoid allocation of sapply()
-  ans = 0L
+  ans = 0
   lookup = c("raw"=1L, "integer"=4L, "double"=8L, "complex"=16L)
   for (i in seq_along(DT)) {
     col = DT[[i]]
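
Why the one-character change matters: R integers are 32-bit, so any sum that passes .Machine$integer.max (2,147,483,647) overflows to NA with a warning. Accumulating per-column byte counts into the integer `ans = 0L` hit exactly that once a table's total size passed 2 GiB; a double accumulator represents byte counts of this size exactly. A minimal sketch of the failure mode (standalone illustration, not the patched function itself):

# Integer accumulation overflows past .Machine$integer.max:
ans <- 2000000000L
ans + 2000000000L   # NA, with a "NAs produced by integer overflow" warning

# Double accumulation (the fix) handles the same magnitude:
ans <- 2e9
ans + 2e9           # 4e+09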

inst/tests/other.Rraw

Lines changed: 7 additions & 0 deletions
@@ -766,3 +766,10 @@ if (loaded[["nanotime"]]) {
   # respect dec=',' for nanotime, related to #6446, corresponding to tests 2281.*
   test(31, fwrite(data.table(as.nanotime(.POSIXct(0))), dec=',', sep=';'), output="1970-01-01T00:00:00,000000000Z")
 }
+
+# test for bug #6607
+local({
+  DT <- as.data.table(lapply(1:15, function(i) runif(20e6)))
+  res <- tables()
+  data.table:::test(32, res[NAME=='DT', .(NAME, NROW, NCOL, MB)], data.table(NAME="DT", NROW=20000000L, NCOL=15L, MB=2288))
+})
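
A back-of-the-envelope check of the test's expected values (assuming the reported MB is the byte total divided by 1024^2, per the lower-bound estimate in type_size()):

bytes <- 15 * 20e6 * 8         # 15 double columns x 20e6 rows x 8 bytes = 2.4e9 bytes
bytes > .Machine$integer.max   # TRUE: this total overflowed the old 0L accumulator
bytes / 1024^2                 # ~2288.8, consistent with the MB=2288 the test expects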
