@@ -35,7 +35,7 @@ window.addEventListener("load", () => {

        // Reload plots with the current dandiset ID
        load_over_time_plot(selected_dandiset);
-       load_per_asset_histogram(selected_dandiset);
+       load_histogram(selected_dandiset);
        load_geographic_heatmap(selected_dandiset);
    });
}
@@ -52,7 +52,7 @@ window.addEventListener("load", () => {

        // Reload plots with the current dandiset ID
        load_over_time_plot(selected_dandiset);
-       load_per_asset_histogram(selected_dandiset);
+       load_histogram(selected_dandiset);
        load_geographic_heatmap(selected_dandiset);
    });
}
@@ -70,7 +70,7 @@ window.addEventListener("load", () => {
        // Reload plots with the current dandiset ID
        update_totals(selected_dandiset);
        load_over_time_plot(selected_dandiset);
-       load_per_asset_histogram(selected_dandiset);
+       load_histogram(selected_dandiset);
        load_geographic_heatmap(selected_dandiset);
    });
}
@@ -81,29 +81,29 @@ window.addEventListener("resize", resizePlots);

function resizePlots() {
    // Select the div elements
-   const overTimePlot = document.getElementById("over_time_plot");
-   const perAssetHistogram = document.getElementById("per_asset_histogram");
-   const geographyHeatmap = document.getElementById("geography_heatmap");
+   const over_time_plot = document.getElementById("over_time_plot");
+   const histogram = document.getElementById("histogram");
+   const geography_heatmap = document.getElementById("geography_heatmap");

    const dandiset_selector = document.getElementById("dandiset_selector");
    const selected_dandiset = dandiset_selector.value;

    // Update their sizes dynamically
-   if (overTimePlot) {
-       overTimePlot.style.width = "90vw";
-       overTimePlot.style.height = "80vh";
-       Plotly.relayout(overTimePlot, { width: overTimePlot.offsetWidth, height: overTimePlot.offsetHeight });
+   if (over_time_plot) {
+       over_time_plot.style.width = "90vw";
+       over_time_plot.style.height = "80vh";
+       Plotly.relayout(over_time_plot, { width: over_time_plot.offsetWidth, height: over_time_plot.offsetHeight });
    }
-   if (selected_dandiset !== "archive" && perAssetHistogram) {
-       perAssetHistogram.style.width = "90vw";
-       perAssetHistogram.style.height = "80vh";
-       Plotly.relayout(perAssetHistogram, { width: perAssetHistogram.offsetWidth, height: perAssetHistogram.offsetHeight });
+   if (selected_dandiset !== "archive" && histogram) {
+       histogram.style.width = "90vw";
+       histogram.style.height = "80vh";
+       Plotly.relayout(histogram, { width: histogram.offsetWidth, height: histogram.offsetHeight });
    }
-   if (geographyHeatmap) {
-       geographyHeatmap.style.width = "90vw";
-       geographyHeatmap.style.height = "80vh";
-       geographyHeatmap.style.margin = "auto";
-       Plotly.relayout(geographyHeatmap, { width: geographyHeatmap.offsetWidth, height: geographyHeatmap.offsetHeight });
+   if (geography_heatmap) {
+       geography_heatmap.style.width = "90vw";
+       geography_heatmap.style.height = "80vh";
+       geography_heatmap.style.margin = "auto";
+       Plotly.relayout(geography_heatmap, { width: geography_heatmap.offsetWidth, height: geography_heatmap.offsetHeight });
    }
}
@@ -183,15 +183,15 @@ fetch(ALL_DANDISET_TOTALS_URL)
        // Load the plot for the first ID by default
        update_totals("archive");
        load_over_time_plot("archive");
-       load_per_asset_histogram("archive");
+       load_histogram("archive");
        load_geographic_heatmap("archive");

        // Update the plots when a new Dandiset ID is selected
        selector.addEventListener("change", (event) => {
            const target = event.target;
            update_totals(target.value);
            load_over_time_plot(target.value);
-           load_per_asset_histogram(target.value);
+           load_histogram(target.value);
            load_geographic_heatmap(target.value);
        });
    })
@@ -215,7 +215,8 @@ function update_totals(dandiset_id) {
    const human_readable_bytes_sent = format_bytes(totals.total_bytes_sent);
    //totals_element.innerText = `Totals: ${human_readable_bytes_sent} sent to ?(WIP)? unique requesters from
    // ${totals.number_of_unique_regions} regions of ${totals.number_of_unique_countries} countries.`;
-   totals_element.innerHTML = `A total of ${human_readable_bytes_sent} was sent to ${totals.number_of_unique_regions} regions across ${totals.number_of_unique_countries} countries. <sup>*</sup>`;
+   header = `A total of ${human_readable_bytes_sent} was sent to ${totals.number_of_unique_regions} regions across ${totals.number_of_unique_countries} countries. <sup>*</sup>`
+   totals_element.innerHTML = dandiset_id != "undetermined" ? header : header + `<br>However, the activity could not be uniquely associated with a particular Dandiset.<br>This can occur if the same file exists within more than one Dandiset at a time.`

    // Add the footnote
    const footnote = document.createElement("div");
@@ -331,21 +332,100 @@ function load_over_time_plot(dandiset_id) {
        });
}

-// Function to fetch and render histogram over asset IDs
-function load_per_asset_histogram(dandiset_id) {
-   const plot_element_id = "per_asset_histogram";
-   let by_asset_summary_tsv_url = "";
+// Function to fetch and render histogram over asset or Dandiset IDs
+function load_histogram(dandiset_id) {
+   let by_asset_summary_tsv_url, dandiset_totals_json_url;

    // Suppress div element content if 'archive' is selected
-   if (dandiset_id === "archive") {
-       const plot_element = document.getElementById(plot_element_id);
+   if (dandiset_id === "undetermined") {
+       const plot_element = document.getElementById("histogram");
        if (plot_element) {
            plot_element.innerText = "";
        }
        return "";
+   } if (dandiset_id === "archive") {
+       load_dandiset_histogram()
    } else {
        by_asset_summary_tsv_url = `${BASE_TSV_URL}/${dandiset_id}/by_asset.tsv`;
+       load_per_asset_histogram(by_asset_summary_tsv_url);
    }
+}
+
+function load_dandiset_histogram() {
+   const plot_element_id = "histogram";
+
+   fetch(ALL_DANDISET_TOTALS_URL)
+       .then((response) => {
+           if (!response.ok) {
+               throw new Error(`Failed to fetch JSON file: ${response.statusText}`);
+           }
+           return response.json();
+       })
+       .then((data) => {
+           // Exclude 'archive' and cast IDs to strings
+           const combined = Object.keys(data)
+               .map(dandiset_id => ({
+                   dandiset_id: "Dandiset ID " + String(dandiset_id),
+                   bytes: data[dandiset_id].total_bytes_sent
+               }))
+               .sort((a, b) => b.bytes - a.bytes);
+
+           const sorted_dandiset_ids = combined.map(item => item.dandiset_id);
+           const sorted_bytes_sent = combined.map(item => item.bytes);
+           const human_readable_bytes_sent = sorted_bytes_sent.map(bytes => format_bytes(bytes));
+
+           const plot_data = [
+               {
+                   type: "bar",
+                   x: sorted_dandiset_ids,
+                   y: sorted_bytes_sent,
+                   text: sorted_dandiset_ids.map((dandiset_id, index) => `${dandiset_id}<br>${human_readable_bytes_sent[index]}`),
+                   textposition: "none",
+                   hoverinfo: "text",
+               }
+           ];
+
+           const layout = {
+               bargap: 0,
+               title: {
+                   text: `Bytes sent per Dandiset`,
+                   font: { size: 24 }
+               },
+               xaxis: {
+                   title: {
+                       text: "(hover over an entry for Dandiset IDs)",
+                       font: { size: 16 }
+                   },
+                   showticklabels: false,
+               },
+               yaxis: {
+                   title: {
+                       text: USE_LOG_SCALE ? "Bytes (log scale)" : "Bytes",
+                       font: { size: 16 }
+                   },
+                   type: USE_LOG_SCALE ? "log" : "linear",
+                   tickformat: USE_LOG_SCALE ? "" : "~s",
+                   ticksuffix: USE_LOG_SCALE ? "" : "B",
+                   tickvals: USE_LOG_SCALE ? [1000, 1000000, 1000000000, 1000000000000, 1000000000000000, 1000000000000000000] : null,
+                   ticktext: USE_LOG_SCALE ? ["KB", "MB", "GB", "TB", "PB", "EB"] : null
+               },
+           };
+
+           Plotly.newPlot(plot_element_id, plot_data, layout);
+       })
+       .catch((error) => {
+           console.error("Error:", error);
+           const plot_element = document.getElementById(plot_element_id);
+           if (plot_element) {
+               while (plot_element.firstChild) {
+                   plot_element.removeChild(plot_element.firstChild);
+               }
+           }
+       });
+}
+
+function load_per_asset_histogram(by_asset_summary_tsv_url) {
+   const plot_element_id = "histogram";

    fetch(by_asset_summary_tsv_url)
        .then((response) => {
@@ -363,12 +443,9 @@ function load_per_asset_histogram(dandiset_id) {
            const data = rows.slice(1).map((row) => row.split("\t"));

            const asset_names = data.map((row) => {
-               const filename = row[0].split("/").at(-1);
-               const suffix = filename.split(".").at(-1);
-
-               if (suffix !== "nwb" && suffix !== "mp4" && suffix !== "avi") {
-                   throw new Error("Currently only supports NWB files.");
-               }
+               let suffix, filename;
+               suffix = row[0].split(".").at(-1);
+               filename = suffix === "nwb" ? row[0].split("/").at(-1) : row[0];

                return filename;
            });