
Commit

WIP: test out uplooad
yoland68 committed Jun 16, 2024
1 parent a12843b commit 355a749
Showing 4 changed files with 76 additions and 116 deletions.
110 changes: 24 additions & 86 deletions action.yml
@@ -110,16 +110,6 @@ runs:
conda list
shell: bash -el {0}

# - name: Download models
# if: ${{ inputs.os != 'windows' }}
# run: |
# cd ${{ github.action_path }}
# ls "${{ github.workspace }}"
# pip3 install -r requirements.txt
# python3 download-models.py raw '${{ inputs.models-json }}' "${{ github.workspace }}/models/"
# ls -R "${{ github.workspace }}/models/"
# shell: bash -el {0}

- name: '[Unix] Install dependencies'
if: ${{ inputs.os != 'windows' }}
shell: bash -el {0}
@@ -137,6 +127,13 @@ runs:
cp "${HOME}/v1-5-pruned-emaonly.ckpt" "${{ github.workspace }}/models/checkpoints/"
cp "${HOME}/epiNoiseoffset_v2.safetensors" "${{ github.workspace }}/models/loras/"
- name: '[Unix] Authenticate to Google Cloud'
shell: bash -el {0}
if: ${{ inputs.os != 'windows' }}
run: |
echo "${{ secrets.GCP_SERVICE_ACCOUNT_KEY }}" > /tmp/keyfile.json
gcloud auth activate-service-account --key-file=/tmp/keyfile.json
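For context (not part of this commit): gcloud auth activate-service-account only signs in the gcloud CLI. The google-cloud-storage client that queue_prompt.py now uses would need either GOOGLE_APPLICATION_CREDENTIALS pointing at the same keyfile or an explicit path, as in this minimal Python sketch with a placeholder bucket name.

# Sketch only (placeholder bucket name): build a GCS client directly from the
# keyfile written in the step above instead of relying on ambient credentials.
from google.cloud import storage

client = storage.Client.from_service_account_json("/tmp/keyfile.json")
bucket = client.get_bucket("example-artifact-bucket")  # placeholder, not a real bucket
print(bucket.name)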
- name: '[Unix] Run Python application quick test'
if: ${{ inputs.os != 'windows' }}
shell: bash -el {0}
@@ -156,6 +153,21 @@ runs:
python3 poll_server_start.py > application.log 2>&1
shell: bash -el {0}
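poll_server_start.py itself is not shown in this diff; a generic sketch of the kind of polling loop such a script usually contains follows, with the URL, port, timeout, and interval all assumptions.

# Generic sketch of polling a local server until it answers; URL, timeout,
# and interval are assumptions, not taken from poll_server_start.py.
import time
import requests

def wait_for_server(url: str = "http://127.0.0.1:8188/", timeout: float = 300.0, interval: float = 2.0) -> None:
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        try:
            if requests.get(url, timeout=5).status_code == 200:
                return
        except requests.RequestException:
            pass  # server not up yet
        time.sleep(interval)
    raise TimeoutError(f"Server at {url} did not start within {timeout} seconds")

wait_for_server()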

- name: '[Unix] Get Commit Details'
id: unix_get_commit_details
if: ${{ inputs.os != 'windows' }}
shell: bash -el {0}
run: |
TIMESTAMP=$(git show -s --format=%cI HEAD^)
MESSAGE=$(git show -s --format=%s HEAD^)
COMMIT_HASH=$(git rev-parse HEAD^)
echo "Commit time: $TIMESTAMP"
echo "Commit title: $MESSAGE"
echo "Commit hash: $COMMIT_HASH"
echo "commit_time=$TIMESTAMP" >> $GITHUB_OUTPUT
echo "commit_title=$MESSAGE" >> $GITHUB_OUTPUT
echo "commit_hash=$COMMIT_HASH" >> $GITHUB_OUTPUT
- name: '[Unix] Queue Prompts'
id: unix_queue_prompt
if: ${{ inputs.os != 'windows' }}
@@ -185,84 +197,10 @@ runs:

- name: '[Unix] Debug print out commit timestamp and commit message'
if: ${{ inputs.os != 'windows' }}
shell: bash
shell: bash -el {0}
run: |
echo "Event: ${{ github.event }}"
- name: '[Unix] Get Commit Details'
id: unix_get_commit_details
if: ${{ inputs.os != 'windows' }}
shell: bash
run: |
TIMESTAMP=$(git show -s --format=%cI HEAD^)
MESSAGE=$(git show -s --format=%s HEAD^)
COMMIT_HASH=$(git rev-parse HEAD^)
echo "Commit time: $TIMESTAMP"
echo "Commit title: $MESSAGE"
echo "Commit hash: $COMMIT_HASH"
echo "commit_time=$TIMESTAMP" >> $GITHUB_OUTPUT
echo "commit_title=$MESSAGE" >> $GITHUB_OUTPUT
echo "commit_hash=$COMMIT_HASH" >> $GITHUB_OUTPUT
- name: '[Unix-Only] Install jq'
uses: dcarbone/[email protected]

- name: '[Unix] Call API to upload artifact details'
if: ${{ inputs.os != 'windows' && success() }}
shell: bash
run: |
payload=$(jq -n \
--arg repo "${{ github.repository }}" \
--arg run_id "${{ github.run_id }}" \
--arg os "${{ inputs.os }}" \
--arg cuda_version "${{ inputs.cuda_version }}" \
--arg output_files_gcs_paths "${{ steps.upload-output-files.outputs.uploaded }}" \
--arg commit_hash "${{ steps.unix_get_commit_details.outputs.commit_hash }}" \
--arg commit_time "${{ steps.unix_get_commit_details.outputs.commit_time }}" \
--arg commit_message "${{ steps.unix_get_commit_details.outputs.commit_title }}" \
--arg branch_name "${{ github.ref_name }}" \
--arg bucket_name "${{ inputs.gcs_bucket_name }}" \
--arg workflow_name "${{ inputs.workflow_name }}" \
--argjson start_time "${{ steps.unix_start_time.outputs.start_time }}" \
--argjson end_time "${{ steps.unix_end_time.outputs.end_time }}" \
'{
repo: $repo,
run_id: $run_id,
os: $os,
cuda_version: $cuda_version,
output_files_gcs_paths: $output_files_gcs_paths,
commit_hash: $commit_hash,
commit_time: $commit_time,
commit_message: $commit_message,
branch_name: $branch_name,
bucket_name: $bucket_name,
workflow_name: $workflow_name,
start_time: $start_time,
end_time: $end_time
}')
echo "$payload"
response_code=$(curl -o "${{ github.workspace }}/application.log" \
-s -w "%{http_code}" \
-X POST "${{inputs.api_endpoint}}" \
-H "Content-Type: application/json" \
-d "$payload")
if [[ $response_code -ne 200 ]]; then
echo "API request failed with status code $response_code and response body"
cat "${{ github.workspace }}/application.log"
exit 1
fi
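For illustration only, the jq/curl pair above is equivalent to a small requests call; the endpoint and every field value below are placeholders rather than the action's real inputs.

# Sketch only: the same payload and POST expressed with requests.
# Every value here is a placeholder standing in for a template expression above.
import requests

payload = {
    "repo": "owner/repo",
    "run_id": "123456789",
    "os": "linux",
    "cuda_version": "12.1",
    "output_files_gcs_paths": "gs://example-bucket/output-files/...",
    "commit_hash": "abc1234",
    "commit_time": "2024-06-16T00:00:00Z",
    "commit_message": "example commit title",
    "branch_name": "main",
    "bucket_name": "example-bucket",
    "workflow_name": "example-workflow",
    "start_time": 1718500000,  # jq --argjson keeps these numeric
    "end_time": 1718500300,
}
response = requests.post("https://example.invalid/api/upload-artifact", json=payload, timeout=30)
if response.status_code != 200:
    raise RuntimeError(f"API request failed: {response.status_code} {response.text}")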
- name: '[Unix] Upload Output Files'
uses: actions/upload-artifact@v4
with:
name: output-files-${{ github.job }}-${{ inputs.os }}-${{inputs.workflow_name}}-${{ github.run_id }}
path: ${{ github.workspace }}/output/**

- name: '[Unix] Upload log file'
uses: actions/upload-artifact@v4
if: ${{ success() || failure() }}
@@ -272,7 +210,7 @@ runs:

- name: '[Unix] Cleanup Repo'
if: ${{ inputs.os != 'windows' && ( success() || failure() ) }}
shell: bash
shell: bash -el {0}
run: rm -rf ${{ github.workspace }}/*

- name: '[Unix] Cleanup Conda'
53 changes: 36 additions & 17 deletions action_yaml_checker.py
@@ -11,9 +11,16 @@

parser = argparse.ArgumentParser(description="Check if action yaml files have step names and if windows actions are aligned with unix actions.")
parser.add_argument("-c", "--complete", action="store_true", help="Include complete step information")
parser.add_argument(
"-k", "--key", type=str, default="run", help="Step key to display for each step (default: run)"
)
parser.add_argument(
"--fallback-key", type=str, default="", help="Fallback key to use when the primary key is missing from a step"
)
parser.add_argument("yaml_file", help="Path to the YAML file")

def conditional_zip(x, y, step_name_runs_map, complete: bool = False):

def conditional_zip(x, y, step_name_extra_info_map, complete: bool = False):
combined_list = []
i = 0 # Pointer for x
j = 0 # Pointer for y
@@ -22,8 +29,8 @@ def conditional_zip(x, y, step_name_runs_map, complete: bool = False):
x_curr = x[i] if i < len(x) else ''
y_curr = y[j] if j < len(y) else ''

x_runs = step_name_runs_map[x_curr] if x_curr != '' else ''
y_runs = step_name_runs_map[y_curr] if y_curr != '' else ''
x_runs = step_name_extra_info_map[x_curr] if x_curr != "" else ""
y_runs = step_name_extra_info_map[y_curr] if y_curr != "" else ""

if '-Only]' in x_curr:
if complete:
@@ -46,18 +53,21 @@ def conditional_zip(x, y, step_name_runs_map, complete: bool = False):

return combined_list
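To make the pairing easier to follow, here is a reduced sketch of the idea behind conditional_zip (hypothetical step names, simplified logic): a step marked '-Only]' gets an empty cell in the other column, while everything else is paired positionally.

# Simplified sketch of the pairing logic, not the function itself.
unix = ["[Unix] Install deps", "[Unix-Only] Install jq", "[Unix] Queue Prompts"]
wins = ["[Win] Install deps", "[Win] Queue Prompts"]

rows, i, j = [], 0, 0
while i < len(unix) or j < len(wins):
    u = unix[i] if i < len(unix) else ""
    w = wins[j] if j < len(wins) else ""
    if "-Only]" in u:      # platform-specific step: blank cell on the Windows side
        rows.append((u, ""))
        i += 1
    else:                  # otherwise the two columns advance together
        rows.append((u, w))
        i += 1
        j += 1
print(rows)  # [('[Unix] Install deps', '[Win] Install deps'), ('[Unix-Only] Install jq', ''), ...]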

def get_step_names(yaml_file, complete: bool = False):

def get_step_names(
yaml_file, complete: bool = False, step_key: str = "run", fallback_key: str = ""
):
with open(yaml_file, 'r') as file:
data = yaml.safe_load(file)

steps = data['runs']['steps']

for step in steps:
if 'name' not in step:
raise Exception("Step name not found in the action file: {}".format(step))
step_name_runs_map = {}

step_name_extra_info_map = {}

step_names = [step['name'] for step in steps if '[Universal]' not in step['name']]
unix_step_names = []
wins_step_names = []
@@ -67,20 +77,29 @@ def get_step_names(yaml_file, complete: bool = False):

if '[Win' in step['name']:
wins_step_names.append(step['name'])
step_name_runs_map[step['name']] = step['run'] if 'run' in step else step.get('uses', '[No run or uses info]')
step_name_extra_info_map[step["name"]] = (
step[step_key]
if step_key in step
else step.get(
fallback_key if fallback_key != "" else "lalala", "[No info]"
)
)

if len(unix_step_names) + len(wins_step_names) != len(step_names):
uncategorized_steps = set(step_names) - set(unix_step_names + wins_step_names)
raise Exception("These steps are not categorized: {}".format(uncategorized_steps))

tabulated_list = conditional_zip(unix_step_names, wins_step_names, step_name_runs_map, complete=complete)


tabulated_list = conditional_zip(
unix_step_names, wins_step_names, step_name_extra_info_map, complete=complete
)

print(tabulate(tabulated_list, headers=['Unix', 'Windows'], tablefmt='grid'))

if __name__ == "__main__":

args = parser.parse_args()
if __name__ == "__main__":
args = parser.parse_args()

yaml_file = args.yaml_file
get_step_names(yaml_file, args.complete)

yaml_file = args.yaml_file
get_step_names(
yaml_file, args.complete, step_key=args.key, fallback_key=args.fallback_key
)
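A usage note (not in the diff): with the new parameters the checker can tabulate any per-step field and fall back to a second key. A hypothetical call equivalent to running 'python action_yaml_checker.py -c -k shell --fallback-key uses action.yml':

# Hypothetical usage of the new parameters; the file name is an example.
from action_yaml_checker import get_step_names

get_step_names("action.yml", complete=True, step_key="shell", fallback_key="uses")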
27 changes: 15 additions & 12 deletions queue_prompt.py
@@ -1,11 +1,11 @@
import argparse
import datetime
import json
import subprocess
import datetime

import requests
from google.cloud import storage

from firebase_admin import storage

def read_json_file(file_path):
with open(file_path, 'r', encoding='utf-8') as file:
@@ -29,13 +29,16 @@ def is_completed(status_response, prompt_id):
and status_response[prompt_id]['status'].get('completed', False)
)

#TODO: add support for different file type
def upload_img_from_filename(bucket_name: str, gs_path: str, file_path: str, public: bool = True, content_type="image/png"):
bucket = storage.bucket(bucket_name)
blob = bucket.blob(gs_path)
blob.upload_from_filename(file_path, content_type=content_type)
if public:
blob.make_public()

def upload_to_gcs(bucket_name: str, destination_blob_name: str, source_file_name: str):
storage_client = storage.Client()
bucket = storage_client.get_bucket(bucket_name)

blob = bucket.blob(destination_blob_name)
blob.upload_from_filename(source_file_name)
blob.make_public()
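Worth noting (not in the diff): upload_to_gcs constructs storage.Client() with no arguments, so it relies on ambient credentials such as GOOGLE_APPLICATION_CREDENTIALS pointing at the keyfile written in the auth step. A hypothetical call with placeholder names:

# Hypothetical invocation; bucket and paths are placeholders.
import os
os.environ.setdefault("GOOGLE_APPLICATION_CREDENTIALS", "/tmp/keyfile.json")

upload_to_gcs(
    bucket_name="example-artifact-bucket",
    destination_blob_name="output-files/example-run/ComfyUI_00001_.png",
    source_file_name="/tmp/output/ComfyUI_00001_.png",
)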


def send_payload_to_api(args, output_files_gcs_paths,
@@ -98,15 +101,16 @@ def main(args):
end_time = int(datetime.datetime.now().timestamp())

#TODO: add support for multiple file outputs
gs_path = f"output-files/{args.github_action_workflow_name}-{args.os}-{args.comfy_workflow_name}-run-${args.run_id}"
upload_img_from_filename(args.bucket_name, gs_path, f"{args.workspace_path}/output/{args.output_file_prefix}_{counter:05}_.png", public=True)
gs_path = f"output-files/{args.github_action_workflow_name}-{args.os}-{file_name}-run-${args.run_id}"
upload_to_gcs(args.gsc_bucket_name, gs_path, f"{args.workspace_path}/output-files/{args.output_file_prefix}_{counter:05}_.png")

send_payload_to_api(args, gs_path, start_time, end_time)
counter += 1


if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Send a JSON file contents to a server as a prompt.')
parser.add_argument('--api-endpoint', type=str, help='API endpoint.')
parser.add_argument('--comfy-workflow-names', type=str, help='List of comfy workflow names.')
parser.add_argument('--github-action-workflow-name', type=str, help='Github action workflow name.')
parser.add_argument('--os', type=str, help='Operating system.')
@@ -117,7 +121,6 @@ def main(args):
parser.add_argument('--commit-time', type=str, help='Commit time.')
parser.add_argument('--commit-message', type=str, help='Commit message.')
parser.add_argument('--branch-name', type=str, help='Branch name.')
parser.add_argument('--workflow-file-names', type=str, help='Workflow file names.')
parser.add_argument('--gsc-bucket-name', type=str, help='Name of the GCS bucket to store the output files in.')
parser.add_argument('--workspace-path', type=str, help='Workspace (ComfyUI repo) path, likely ${HOME}/action_runners/_work/ComfyUI/ComfyUI/.')
parser.add_argument('--action-path', type=str, help='Action path., likely ${HOME}/action_runners/_work/comfy-action/.')
2 changes: 1 addition & 1 deletion requirements.txt
@@ -1,3 +1,3 @@
requests
comfy-cli
firebase_admin
google-cloud-storage
