
Commit

testing
zzeppozz committed Dec 20, 2024
1 parent d515868 commit 6fc6d2a
Showing 2 changed files with 8 additions and 10 deletions.
2 changes: 0 additions & 2 deletions bison/spnet/pam_matrix.py
@@ -66,8 +66,6 @@ def __init__(
 
         # Populate this on computation, with keys used in filename construction
         self.stats_matrices = {}
-        for key in STATISTICS_TYPE.all():
-            self.stats_matrices[key] = None
 
         HeatmapMatrix.__init__(
             self, cmp_pam_coo_array, table_type, datestr, cmp_row_categ, cmp_col_categ,
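The pam_matrix.py hunk removes the loop that pre-seeded self.stats_matrices with a None entry for every key in STATISTICS_TYPE.all(); after this change the dict starts empty and only gains entries as statistics are computed. A minimal sketch of that before/after behavior, using hypothetical stand-in names rather than the project's real classes:

    # Hypothetical stand-ins; bison's STATISTICS_TYPE and PAM classes differ.
    STAT_KEYS = ["alpha", "beta", "gamma"]  # stand-in for STATISTICS_TYPE.all()

    class PamLike:
        def __init__(self, preseed=False):
            # Pre-commit behavior: every key present, mapped to None.
            # Post-commit behavior: start empty, fill on computation.
            self.stats_matrices = {key: None for key in STAT_KEYS} if preseed else {}

        def has_stat(self, key):
            # A get()-plus-None test works under either convention.
            return self.stats_matrices.get(key) is not None

    pam = PamLike()
    print(pam.has_stat("alpha"))           # False until the statistic is computed
    pam.stats_matrices["alpha"] = [[1, 0], [0, 1]]
    print(pam.has_stat("alpha"))           # True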
16 changes: 8 additions & 8 deletions bison/task/calc_stats.py
@@ -148,7 +148,7 @@ def download_dataframe(s3, table_type, datestr, bucket, bucket_dir):

 out_filename = heatmap.compress_to_file(local_path=TMP_PATH)
 s3_mtx_key = f"{S3_SUMMARY_DIR}/{os.path.basename(out_filename)}"
-s3.upload(out_filename, S3_BUCKET, s3_mtx_key, overwrite=overwrite)
+_uri = s3.upload(out_filename, S3_BUCKET, s3_mtx_key, overwrite=overwrite)

 # .................................
 # Create a summary matrix for each dimension of sparse matrix and upload
@@ -157,13 +157,13 @@ def download_dataframe(s3, table_type, datestr, bucket, bucket_dir):
 spsum_table_type = sp_sum_mtx.table_type
 sp_sum_filename = sp_sum_mtx.compress_to_file()
 s3_spsum_key = f"{S3_SUMMARY_DIR}/{os.path.basename(sp_sum_filename)}"
-s3.upload(sp_sum_filename, S3_BUCKET, s3_spsum_key, overwrite=overwrite)
+_uri = s3.upload(sp_sum_filename, S3_BUCKET, s3_spsum_key, overwrite=overwrite)

 od_sum_mtx = SummaryMatrix.init_from_heatmap(heatmap, axis=1)
 odsum_table_type = od_sum_mtx.table_type
 od_sum_filename = od_sum_mtx.compress_to_file()
 s3_odsum_key = f"{S3_SUMMARY_DIR}/{os.path.basename(od_sum_filename)}"
-s3.upload(od_sum_filename, S3_BUCKET, s3_odsum_key, overwrite=overwrite)
+_uri = s3.upload(od_sum_filename, S3_BUCKET, s3_odsum_key, overwrite=overwrite)

 # .................................
 # Create PAM from Heatmap
@@ -180,7 +180,7 @@ def download_dataframe(s3, table_type, datestr, bucket, bucket_dir):
 stats_data_dict, stats_meta_dict, table_type, datestr = PAM.uncompress_zipped_data(
     stats_zip_filename)
 stats_key = f"{S3_OUT_DIR}/{os.path.basename(stats_zip_filename)}"
-s3.upload(stats_zip_filename, S3_BUCKET, stats_key, overwrite=overwrite)
+_uri = s3.upload(stats_zip_filename, S3_BUCKET, stats_key, overwrite=overwrite)

 """
 from bison.task.calc_stats import *
@@ -205,7 +205,7 @@ def download_dataframe(s3, table_type, datestr, bucket, bucket_dir):
 out_filename = heatmap.compress_to_file(local_path=TMP_PATH)
 s3_mtx_key = f"{S3_SUMMARY_DIR}/{os.path.basename(out_filename)}"
-s3.upload(out_filename, S3_BUCKET, s3_mtx_key, overwrite=overwrite)
+_uri = s3.upload(out_filename, S3_BUCKET, s3_mtx_key, overwrite=overwrite)
 # .................................
 # Create a summary matrix for each dimension of sparse matrix and upload
@@ -214,13 +214,13 @@ def download_dataframe(s3, table_type, datestr, bucket, bucket_dir):
 spsum_table_type = sp_sum_mtx.table_type
 sp_sum_filename = sp_sum_mtx.compress_to_file()
 s3_spsum_key = f"{S3_SUMMARY_DIR}/{os.path.basename(sp_sum_filename)}"
-s3.upload(sp_sum_filename, S3_BUCKET, s3_spsum_key, overwrite=overwrite)
+_uri = s3.upload(sp_sum_filename, S3_BUCKET, s3_spsum_key, overwrite=overwrite)
 od_sum_mtx = SummaryMatrix.init_from_heatmap(heatmap, axis=1)
 odsum_table_type = od_sum_mtx.table_type
 od_sum_filename = od_sum_mtx.compress_to_file()
 s3_odsum_key = f"{S3_SUMMARY_DIR}/{os.path.basename(od_sum_filename)}"
-s3.upload(od_sum_filename, S3_BUCKET, s3_odsum_key, overwrite=overwrite)
+_uri = s3.upload(od_sum_filename, S3_BUCKET, s3_odsum_key, overwrite=overwrite)
 # .................................
 # Create PAM from Heatmap
@@ -237,5 +237,5 @@ def download_dataframe(s3, table_type, datestr, bucket, bucket_dir):
 stats_data_dict, stats_meta_dict, table_type, datestr = PAM.uncompress_zipped_data(
     stats_zip_filename)
 stats_key = f"{S3_SUMMARY_DIR}/{os.path.basename(stats_zip_filename)}"
-s3.upload(stats_zip_filename, S3_BUCKET, stats_key, overwrite=overwrite)
+_uri = s3.upload(stats_zip_filename, S3_BUCKET, stats_key, overwrite=overwrite)
 """
