Commit

tidy
zzeppozz committed Oct 18, 2024
1 parent 43b4fb2 commit d4101fe
Showing 14 changed files with 237 additions and 237 deletions.
11 changes: 3 additions & 8 deletions aws/ec2/userdata_annotate_riis.sh
@@ -1,19 +1,14 @@
#!/bin/bash
# This is the user data script to be executed on an EC2 instance.

aws configure set default.region us-east-1 && \
aws configure set default.output json

sudo apt update
#sudo apt install apache2 certbot plocate unzip
sudo apt install docker.io
sudo apt install docker-compose-v2

# TODO: change this to pull a Docker image
git clone https://github.com/lifemapper/bison.git

cd bison
sudo docker compose -f docker-compose.task.yml up -d
#sudo docker compose -f docker-compose.development.yml -f docker-compose.yml up
sudo docker compose -f compose.annotate_riis.yml up -d
#sudo docker compose -f compose.development.yml -f compose.yml up
# Executes from /home/bison directory, which contains bison code
sudo docker exec bison-bison-1 venv/bin/python -m bison.tools.annotate_riis
sudo docker exec bison-bison-1 venv/bin/python -m bison.task.annotate_riis
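For orientation, here is a minimal sketch of how a user-data script like the one above is typically attached when the task instance is launched with boto3. This is not code from the repository: the AMI ID, instance type, and instance-profile name are placeholders.

```python
# Sketch only: launching the task instance with the user-data script above.
# The AMI ID, instance type, and instance profile are placeholders.
import boto3


def launch_task_instance(userdata_path="aws/ec2/userdata_annotate_riis.sh"):
    # boto3 base64-encodes the UserData string for run_instances.
    with open(userdata_path) as f:
        user_data = f.read()

    ec2 = boto3.client("ec2", region_name="us-east-1")
    response = ec2.run_instances(
        ImageId="ami-xxxxxxxxxxxxxxxxx",   # placeholder Ubuntu AMI
        InstanceType="t3.medium",          # placeholder size
        MinCount=1,
        MaxCount=1,
        UserData=user_data,                # runs once at first boot
        IamInstanceProfile={"Name": "bison-task-profile"},  # placeholder
    )
    return response["Instances"][0]["InstanceId"]
```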
13 changes: 6 additions & 7 deletions aws/events/bison_s0_resolve_riis_lambda.py
@@ -16,8 +16,7 @@
timeout = 900
waittime = 1
EC2_TASK_INSTANCE_ID = "i-0c7e54e257d5c8574"
EC2_KEY = bison_task_key
# bison_task_ec2 = "arn:aws:ec2:us-east-1:321942852011:instance/i-0595bfd381e64d2c9"
# EC2_TASK_INSTANCE_ARN = "arn:aws:ec2:us-east-1:321942852011:instance/i-0595bfd381e64d2c9"

# Initialize Botocore session
session = boto3.session.Session()
@@ -29,7 +28,7 @@
client_ssm = session.client("ssm", config=config)

# Bison command
bison_script = "venv/bin/python -m bison.tools.annotate_riis"
bison_script = "venv/bin/python -m bison.task.annotate_riis"


# --------------------------------------------------------------------------------------
Expand All @@ -56,9 +55,9 @@ def lambda_handler(event, context):
try:
instance_meta = response["StartingInstances"][0]
except KeyError:
raise Exception(f"Invalid response returned {instance_meta}")
raise Exception(f"Invalid response returned {response}")
except ValueError:
raise Exception(f"No instances returned in {instance_meta}")
raise Exception(f"No instances returned in {response}")
except Exception:
raise

@@ -68,10 +67,10 @@ def lambda_handler(event, context):
    print(f"Started instance {instance_meta['InstanceId']}. ")
    print(f"Moved from {curr_state} to {prev_state}")

    # sudo docker compose -f docker-compose.task.yml up
    # sudo docker compose -f compose.annotate_riis.yml up
    response = client_ssm.send_command(
        DocumentName='AWS-RunShellScript',
        Parameters={'commands': [bison_script]},
        InstanceIds=[bison_task_instance_id]
        InstanceIds=[EC2_TASK_INSTANCE_ID]
    )
    return instance_id
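The handler above starts the stopped task instance and then runs the bison command on it through Systems Manager. As a rough, self-contained sketch of that pattern, assuming only boto3; the waiter step and the function name are illustrative additions, not code from this commit:

```python
# Sketch only: the start-instance / wait / run-command pattern used above.
import boto3

EC2_TASK_INSTANCE_ID = "i-0c7e54e257d5c8574"
BISON_SCRIPT = "venv/bin/python -m bison.task.annotate_riis"


def start_and_run(instance_id=EC2_TASK_INSTANCE_ID, command=BISON_SCRIPT):
    session = boto3.session.Session()
    ec2 = session.client("ec2")
    ssm = session.client("ssm")

    # Start the stopped task instance.
    ec2.start_instances(InstanceIds=[instance_id])

    # Wait until EC2 reports the instance as running; the SSM agent may
    # come online slightly later than this state change.
    ec2.get_waiter("instance_running").wait(InstanceIds=[instance_id])

    # Run the bison task on the instance through Systems Manager.
    response = ssm.send_command(
        DocumentName="AWS-RunShellScript",
        Parameters={"commands": [command]},
        InstanceIds=[instance_id],
    )
    return response["Command"]["CommandId"]
```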
@@ -15,4 +15,4 @@
]
}
]
}
}
@@ -24,4 +24,4 @@
"Action": "sts:AssumeRole"
}
]
}
}
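The fragment above is the tail of an IAM trust policy allowing sts:AssumeRole. As a hedged illustration of how such a policy document is typically attached when creating a role with boto3; the role name and trusted service principal below are placeholders, not values from this commit:

```python
# Sketch only: creating a role with a trust policy like the one above.
# The role name and trusted service principal are placeholders.
import json

import boto3

trust_policy = {
    "Version": "2012-10-17",
    "Statement": [
        {
            "Effect": "Allow",
            "Principal": {"Service": "lambda.amazonaws.com"},  # placeholder
            "Action": "sts:AssumeRole",
        }
    ],
}

iam = boto3.client("iam")
# create_role expects the trust policy as a JSON string.
iam.create_role(
    RoleName="bison_workflow_role",  # placeholder
    AssumeRolePolicyDocument=json.dumps(trust_policy),
)
```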
4 changes: 2 additions & 2 deletions aws_scripts/bison_matrix_stats.py
@@ -7,7 +7,7 @@
from bison.common.log import Logger
from bison.common.aws_util import S3
from bison.common.constants import (
PROJECT, S3_BUCKET, S3_OUT_DIR, TMP_PATH, WORKFLOW_ROLE, S3_LOG_DIR
REGION, S3_BUCKET, S3_OUT_DIR, TMP_PATH, S3_LOG_DIR
)

n = DT.datetime.now()
@@ -398,7 +398,7 @@ def write_to_csv(self, filename):
script_name = os.path.splitext(os.path.basename(__file__))[0]
logger = Logger(script_name)

s3 = S3(PROJECT, WORKFLOW_ROLE)
s3 = S3(region=REGION)

# Read directly into DataFrame
orig_df = s3.get_dataframe_from_parquet(
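The call above loads a Parquet export from S3 into a DataFrame through the project's S3 wrapper. Below is a minimal sketch of what such a helper generally does under the hood, using plain boto3 and pandas with placeholder bucket and key names; it is not the wrapper's actual implementation:

```python
# Sketch only: reading a Parquet object from S3 into a pandas DataFrame.
# Bucket and key are placeholders; requires pyarrow (or fastparquet).
import io

import boto3
import pandas as pd


def read_parquet_from_s3(bucket, key, region="us-east-1"):
    s3 = boto3.client("s3", region_name=region)
    obj = s3.get_object(Bucket=bucket, Key=key)
    # Pull the object into memory and let pandas parse the Parquet bytes.
    return pd.read_parquet(io.BytesIO(obj["Body"].read()))


# Hypothetical usage with placeholder names:
# df = read_parquet_from_s3("my-bison-bucket", "out/summary.parquet")
```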