Skip to content

Commit

Permalink
Merge pull request #63 from poldrack/develop
Browse files Browse the repository at this point in the history
Develop
  • Loading branch information
poldrack authored Oct 7, 2019
2 parents 4eb2662 + 794d1f9 commit aecf67e
Show file tree
Hide file tree
Showing 14 changed files with 422 additions and 93 deletions.
1 change: 1 addition & 0 deletions .circleci/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,7 @@ jobs:
export NARPS_BASEDIR="/tmp/data"; Rscript /root/project/PredictionMarketAnalyses/DataProcessing.R
export NARPS_BASEDIR="/tmp/data"; Rscript -e 'library(knitr);library(rmarkdown);rmarkdown::render("/root/project/PredictionMarketAnalyses/PM_Analyses.Rmd","html_document", output_dir = "/tmp/data/PredictionMarkets/Figures")'
export NARPS_BASEDIR="/tmp/data"; Rscript -e 'library(knitr);library(rmarkdown);rmarkdown::render("/root/project/PredictionMarketAnalyses/PM_Figures.Rmd","html_document", output_dir = "/tmp/data/PredictionMarkets/Figures")'
no_output_timeout: 1200
- run:
name: create results tarball
command: |
Expand Down
25 changes: 23 additions & 2 deletions ImageAnalyses/AnalyzeMaps.py
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,9 @@ def mk_overlap_maps(narps, verbose=True):
max_overlap[hyp] = overlap
# clear axis for last space
ax[3, 1].set_axis_off()
plt.savefig(
os.path.join(narps.dirs.dirs['figures'], 'overlap_map.pdf'),
bbox_inches='tight')
plt.savefig(
os.path.join(narps.dirs.dirs['figures'], 'overlap_map.png'),
bbox_inches='tight')
Expand All @@ -125,6 +128,9 @@ def mk_range_maps(narps, dataset='zstat'):
vmax=25,
cut_coords=cut_coords,
axes=ax[i])
plt.savefig(os.path.join(
narps.dirs.dirs['figures'], 'range_map.pdf'),
bbox_inches='tight')
plt.savefig(os.path.join(
narps.dirs.dirs['figures'], 'range_map.png'),
bbox_inches='tight')
Expand All @@ -151,6 +157,9 @@ def mk_std_maps(narps, dataset='zstat'):
vmax=4,
cut_coords=cut_coords,
axes=ax[i])
plt.savefig(os.path.join(
narps.dirs.dirs['figures'], 'std_map.pdf'),
bbox_inches='tight')
plt.savefig(os.path.join(
narps.dirs.dirs['figures'], 'std_map.png'),
bbox_inches='tight')
Expand Down Expand Up @@ -245,7 +254,7 @@ def plot_individual_maps(
axes=ax[ctr])
ctr += 1
plt.savefig(os.path.join(
outdir, '%s.png' % teamID),
outdir, '%s.pdf' % teamID),
bbox_inches='tight')
plt.close(fig)

Expand Down Expand Up @@ -353,6 +362,10 @@ def mk_correlation_maps_unthresh(
vmax=1)
plt.title('H%d:' % hyp+hypotheses_full[hyp])
cc_unthresh[hyp] = (cc, labels)
plt.savefig(os.path.join(
narps.dirs.dirs['figures'],
'hyp%d_%s_map_unthresh.pdf' % (hyp, corr_type)),
bbox_inches='tight')
plt.savefig(os.path.join(
narps.dirs.dirs['figures'],
'hyp%d_%s_map_unthresh.png' % (hyp, corr_type)),
Expand Down Expand Up @@ -512,6 +525,10 @@ def analyze_clusters(
axes=ax[j])
log_to_file(logfile, '')
log_to_file(logfile, '')
plt.savefig(os.path.join(
narps.dirs.dirs['figures'],
'hyp%d_cluster_means.pdf' % hyp),
bbox_inches='tight')
plt.savefig(os.path.join(
narps.dirs.dirs['figures'],
'hyp%d_cluster_means.png' % hyp),
Expand Down Expand Up @@ -589,7 +606,7 @@ def plot_distance_from_mean(narps):
median_corr_df.median_corr)
plt.savefig(os.path.join(
narps.dirs.dirs['figures'],
'median_corr_sorted.png'),
'median_corr_sorted.pdf'),
bbox_inches='tight')
plt.close()

Expand Down Expand Up @@ -662,6 +679,10 @@ def get_thresh_similarity(narps, dataset='resampled'):
figsize=(16, 16),
method='ward')
plt.title(hypotheses_full[hyp])
plt.savefig(os.path.join(
narps.dirs.dirs['figures'],
'hyp%d_pctagree_map_thresh.pdf' % hyp),
bbox_inches='tight')
plt.savefig(os.path.join(
narps.dirs.dirs['figures'],
'hyp%d_pctagree_map_thresh.png' % hyp),
Expand Down
2 changes: 1 addition & 1 deletion ImageAnalyses/ConsensusAnalysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -116,7 +116,7 @@ def mk_figures(narps, logfile, thresh=0.95):

plt.savefig(os.path.join(
narps.dirs.dirs['figures'],
'consensus_map.pdf'))
'consensus_map.pdf'), bbox_inches='tight')
plt.close(fig)


Expand Down
38 changes: 9 additions & 29 deletions ImageAnalyses/DecisionAnalysis.Rmd
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,8 @@ decision_summary <- decision_summary[c('varnum',
'medianConfidence',
'madConfidence')]
write.table(decision_summary,
file=paste(basedir,"figures/Table1.txt",sep='/'))
file=paste(basedir,"figures/Table1.tsv",sep='/'),
sep='\t')
decision_summary
Expand Down Expand Up @@ -216,7 +217,8 @@ cc <- confint(m_hyp_full,parm="beta_",method="Wald")
ctab <- cbind(est=odds_ratios,exp(cc))
kable(ctab)
write.table(ctab,
file=paste(basedir,"figures/OddsRatios.txt",sep='/'))
file=paste(basedir,"figures/OddsRatios.tsv",sep='/'),
sep='\t')
```

Expand Down Expand Up @@ -383,7 +385,7 @@ summary_df <- summary_df %>%
summary_df
write.table(summary_df,
file=paste(basedir,
"figures/ModelingSummaryTable.txt",sep='/'),
"figures/ModelingSummaryTable.tsv",sep='/'),
quote=FALSE,sep='\t', row.names = FALSE)
```

Expand Down Expand Up @@ -737,32 +739,10 @@ df_SuppTable4[5,2:4] = c(anova_testing$`Chisq`[2],
kable(df_SuppTable4)
write.table(df_SuppTable4,
file=paste(basedir,"figures/SuppTable4.txt",sep='/'))
tab_latex = print(xtable(df_SuppTable4,
type = "latex",
digits=c(0,0,2,3,2)),
floating=FALSE,
latex.environments=NULL,
booktabs=TRUE)
latex_preamble=c('\\documentclass[11pt, oneside]{article}',
'\\title{Supplementary Table 4}',
'\\date{}',
'\\usepackage{booktabs}',
'\\begin{document}',
'\\maketitle')
if (TRUE) {
fileConn<-file(paste(basedir,"figures/SuppTable4.tex",sep='/'))
writeLines(c(latex_preamble,tab_latex,c('\\end{document}')), fileConn)
#writeLines(tab_latex, fileConn)
close(fileConn)
# leave this out so that we don't have to install tex within docker
#texi2dvi(paste(basedir,"figures/SuppTable4.tex",sep='/'),
# clean = TRUE)
}
file=paste(basedir,"figures/SuppTable4.tsv",sep='/'),
sep='\t')
```

Expand Down
25 changes: 15 additions & 10 deletions ImageAnalyses/GetMeanSimilarity.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ def get_similarity_summary(narps, corrtype='spearman'):
corrvals_triu = corrvals[numpy.triu_indices_from(corrvals, 1)]
corr_summary.append([hypnames[i],
'mean',
corrvals.shape[0],
numpy.mean(corrvals_triu)])
# plot histogram without zeros
plt.figure(figsize=(8, 8))
Expand All @@ -64,6 +65,7 @@ def get_similarity_summary(narps, corrtype='spearman'):
numpy.triu_indices_from(cluster_corrvals, 1)]
corr_summary.append([hypnames[i],
'cluster%s' % cluster,
len(ci[cluster]),
numpy.mean(cluster_corrvals_triu)])
# plot histogram without zeros
plt.figure(figsize=(8, 8))
Expand All @@ -77,21 +79,24 @@ def get_similarity_summary(narps, corrtype='spearman'):
plt.savefig(histfile)
plt.close()
results_df = pandas.DataFrame(corr_summary)
results_df.columns = ['hyp', 'group', 'correlation']
results_df.columns = ['hyp', 'group', 'Cluster size', 'Correlation']
results_df_wide = results_df.pivot(
index='hyp', columns='group', values='correlation')
results_df_wide = results_df_wide[
['mean', 'cluster1', 'cluster2', 'cluster3']]
index='hyp', columns='group',
values=['Correlation', 'Cluster size'])
results_df_wide.columns = [
'All teams',
'Cluster 1',
'Cluster 2',
'Cluster 3'
]
'%s (%s)' % (col[0], col[1]) for col in results_df_wide.columns.values]
del results_df_wide['Cluster size (mean)']
results_df_wide = results_df_wide[
['Correlation (mean)',
'Correlation (cluster1)', 'Cluster size (cluster1)',
'Correlation (cluster2)', 'Cluster size (cluster2)',
'Correlation (cluster3)', 'Cluster size (cluster3)']]

results_df_wide.to_csv(os.path.join(
narps.dirs.dirs['output'],
'correlation_unthresh',
'mean_unthresh_correlation_by_cluster.csv'))
'mean_unthresh_correlation_by_cluster.csv'),
index=False)
return(results_df_wide)


Expand Down
86 changes: 86 additions & 0 deletions ImageAnalyses/MakeSuppFigures.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
#!/usr/bin/env python
# coding: utf-8
"""
Make latex file for supplementary figures
"""

import os
import argparse
import pandas
from narps import Narps

# LaTeX document header for the supplementary-figures file
preamble = '''\\documentclass[10pt]{article}
\\usepackage[margin=1in]{geometry}
\\geometry{letterpaper}
\\usepackage{graphicx}
\\usepackage{amssymb}
\\usepackage{epstopdf}
\\usepackage{caption}
\\title{Supplementary Figures}
\\author{Botvinick-Nezer et al.}
\\begin{document}
'''

# LaTeX document footer
finale = '\\end{document}\n'


def make_supp_figure_file(narps, figheight=8):
    """
    Generate a LaTeX file including all supplementary figures.

    Reads per-figure metadata (Number, File, Height, Caption) from
    SuppFiguresInfo.tsv in the current working directory and writes
    SupplementaryFigures.tex into the 'SupplementaryMaterials' output
    directory registered on the narps object.

    Parameters
    ----------
    narps : Narps
        Project object; provides ``basedir`` and directory management
        via ``narps.dirs``.
    figheight : int, optional
        NOTE(review): currently unused — the per-figure height is taken
        from the 'Height' column of the TSV instead. Kept for
        backward compatibility with existing callers.
    """
    narps.dirs.get_output_dir('SupplementaryMaterials', base='figures')
    # load supp figure info
    info = pandas.read_csv('SuppFiguresInfo.tsv', sep='\t')
    # accumulate document pieces in a list and join once at the end,
    # instead of repeated string concatenation (quadratic in doc size)
    parts = [preamble]
    for i in range(info.shape[0]):
        # escape characters that are special in LaTeX captions
        caption = info.loc[i, 'Caption'].replace(
            '#', '\\#').replace('%', '\\%')
        imgfile = os.path.join(
            narps.basedir,
            info.loc[i, 'File']
        )
        parts.append('\\begin{figure}[htbp]\n')
        parts.append('\\begin{center}\n')
        parts.append('\\includegraphics[height=%din]{%s}\n' % (
            info.loc[i, 'Height'], imgfile))
        parts.append('\\caption*{Supplementary Figure %d: %s}\n' % (
            info.loc[i, 'Number'],
            caption))
        parts.append('\\end{center}\n')
        parts.append('\\end{figure}\n')
        parts.append('\\newpage\n\n')

    parts.append(finale)

    outfile = os.path.join(
        narps.dirs.dirs['SupplementaryMaterials'],
        'SupplementaryFigures.tex'
    )
    with open(outfile, 'w') as f:
        f.write(''.join(parts))


if __name__ == "__main__":

    # parse arguments
    parser = argparse.ArgumentParser(
        description='Make latex file for supplementary figures')
    parser.add_argument('-b', '--basedir',
                        help='base directory')
    parser.add_argument('-t', '--test',
                        action='store_true',
                        help='use testing mode (no processing)')
    args = parser.parse_args()

    # set up base directory: the command-line flag takes precedence,
    # then the NARPS_BASEDIR environment variable, then /data
    if args.basedir is not None:
        basedir = args.basedir
    elif 'NARPS_BASEDIR' in os.environ:
        basedir = os.environ['NARPS_BASEDIR']
        print("using basedir specified in NARPS_BASEDIR")
    else:
        basedir = '/data'
        print("using default basedir:", basedir)

    # project object wrapping the directory layout under basedir
    narps = Narps(basedir)

    # in test mode, skip the actual LaTeX generation
    if not args.test:
        make_supp_figure_file(narps)
Loading

0 comments on commit aecf67e

Please sign in to comment.