Skip to content

Commit

Permalink
Merge pull request #12 from Kaister300/github-pages
Browse files Browse the repository at this point in the history
Added GH Deploy Workflow
  • Loading branch information
Kaister300 authored Oct 28, 2024
2 parents 1e79358 + ce3dd7d commit 0684b93
Show file tree
Hide file tree
Showing 8 changed files with 91 additions and 183 deletions.
66 changes: 66 additions & 0 deletions .github/workflows/deploy-gh-pages.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
# Simple workflow for deploying static content to GitHub Pages
name: Deploy static content to GH Pages

on:
  # Runs on pushes targeting the default branch
  push:
    branches: ["main"]

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
  contents: read
  pages: write
  id-token: write

# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
concurrency:
  group: "pages"
  cancel-in-progress: false

jobs:
  deploy:
    # Deploys to the "github-pages" environment; the URL is surfaced from the
    # deploy-pages step's output on the environment page.
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Setup Pages
        uses: actions/configure-pages@v5

      - name: Install Python
        uses: actions/setup-python@v5
        with:
          python-version: "3.11"
          cache: "pip"

      # npm install pulls front-end assets; pip installs the build script deps.
      # NOTE(review): relies on the Node toolchain preinstalled on ubuntu-latest.
      - name: Install dependencies
        run: |
          npm install
          pip install -r project/requirements.txt

      # The build script (project/make.py) reads these values from a .env file
      # at the repository root via python-dotenv.
      - name: Create .env file
        run: |
          cat <<EOF > .env
          WEBSITE=${{ vars.GH_PAGES_URL }}
          GOOGLE_SEARCHCONSOLE_AUTH=${{ secrets.GOOGLE_SEARCHCONSOLE_AUTH }}
          BING_WEBMASTER_AUTH=${{ secrets.BING_WEBMASTER_AUTH }}
          EOF

      - name: Build static files
        run: python3 project/make.py

      # Static output produced by make.py is uploaded as the Pages artifact.
      - name: Upload artifact
        uses: actions/upload-pages-artifact@v3
        with:
          path: './project/output'

      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4
49 changes: 0 additions & 49 deletions .github/workflows/deploy-page.yaml

This file was deleted.

20 changes: 0 additions & 20 deletions .github/workflows/on-manual.yaml

This file was deleted.

16 changes: 0 additions & 16 deletions .github/workflows/on-push.yaml

This file was deleted.

2 changes: 0 additions & 2 deletions project/DeploymentCommand.txt

This file was deleted.

90 changes: 24 additions & 66 deletions project/make.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,11 @@
from datetime import datetime
from dotenv import load_dotenv

# Project Directory
PROJECT_DIR = Path(__file__).parent

# Loads .env file
load_dotenv()
load_dotenv(dotenv_path=PROJECT_DIR.parent / ".env")


class TerminalColours(Enum):
Expand All @@ -37,34 +40,6 @@ def print_terminal(string: str, c: TerminalColours):
print(f"{c.value}{string}{TerminalColours.END.value}")


def build_headers(dest_dir, website):
"""
Constructs _headers file.
Parameters:
dest_dir (str): The destination directory for the _headers file.
website (str): The website URL.
Returns:
None
"""
file_extensions = ["js", "json", "css"]
robot_tag = " X-Robots-Tag: noindex\n"

with open(f"{dest_dir}_headers", "w", encoding="utf-8") as file:
for x in file_extensions:
file.write(f"{website}*.{x}\n")
file.write(robot_tag)

if os.getenv("BING_WEBMASTER_AUTH"):
file.write(f"{website}BingSiteAuth.xml\n")
file.write(robot_tag)

if google_auth := os.getenv("GOOGLE_SEARCHCONSOLE_AUTH"):
file.write(f"{website}{google_auth}\n")
file.write(robot_tag)


def build_sitemap(dest_dir, website, project_dir):
"""
Constructs sitemap.xml file.
Expand All @@ -80,7 +55,7 @@ def build_sitemap(dest_dir, website, project_dir):
epoc_time = datetime.fromtimestamp(Path(project_dir).stat().st_mtime)
mod_time = str(epoc_time).split(" ", maxsplit=1)[0]

with open(f"{dest_dir}sitemap.xml", "w", encoding="utf-8") as file:
with open(f"{dest_dir}/sitemap.xml", "w", encoding="utf-8") as file:
file.write('<?xml version="1.0" encoding="UTF-8"?>\n')
file.write('<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">\n')
file.write(" <url>\n")
Expand All @@ -101,7 +76,7 @@ def build_google_auth(dest_dir, auth):
Returns:
None
"""
with open(f"{dest_dir}{auth}", "w", encoding="utf-8") as file:
with open(f"{dest_dir}/{auth}", "w", encoding="utf-8") as file:
file.write(f"google-site-verification: {auth}")


Expand All @@ -116,14 +91,14 @@ def build_bing_auth(dest_dir, auth):
Returns:
None
"""
with open(f"{dest_dir}BingSiteAuth.xml", "w", encoding="utf-8") as file:
with open(f"{dest_dir}/BingSiteAuth.xml", "w", encoding="utf-8") as file:
file.write('<?xml version="1.0">\n')
file.write("<users>\n")
file.write(f" <user>{auth}</user>\n")
file.write("</users>")


def build_robots_txt(dest_dir):
def build_robots_txt(dest_dir, website):
"""
Constructs robots.txt file.
Expand All @@ -133,22 +108,22 @@ def build_robots_txt(dest_dir):
Returns:
None
"""
with open(f"{dest_dir}robots.txt", "w", encoding="utf-8") as file:
with open(f"{dest_dir}/robots.txt", "w", encoding="utf-8") as file:
file.write("User-agent: *\n")
file.write("Disallow: /*.css\n")
file.write("Disallow: /scripts\n")
file.write("Disallow: /paragondetails\n")
file.write("Disallow: /paragoncosts\n")
if os.getenv("WEBSITE"):
file.write(f"Sitemap: {os.getenv('WEBSITE')}sitemap.xml")
if website:
file.write(f"Sitemap: {website}sitemap.xml")


def build_website():
"""
Initiates the build of the website.
"""
src_dir = "project/webpage/"
build_dir = "project/output/"
src_dir = PROJECT_DIR / "webpage"
build_dir = PROJECT_DIR / "output"

# Deletes project folder if it exists
if os.path.exists(build_dir):
Expand All @@ -158,20 +133,17 @@ def build_website():
shutil.copytree(src_dir, build_dir, dirs_exist_ok=True)
print_terminal("Copied Webpage Folder", TerminalColours.OK)

# Creates robots.txt
build_robots_txt(build_dir)
print_terminal("Robots.txt Built", TerminalColours.OK)

# Adds _headers and sitemap.xml
# Adds sitemap.xml
website = os.getenv("WEBSITE")
if website:
build_headers(build_dir, website)
print_terminal("_headers Built", TerminalColours.OK)

build_sitemap(build_dir, website, src_dir)
print_terminal("Sitemap Built", TerminalColours.OK)
else:
print_terminal("Skipped _headers and Sitemap Files", TerminalColours.NO)
print_terminal("Skipped Sitemap File", TerminalColours.NO)

# Creates robots.txt
build_robots_txt(build_dir, website)
print_terminal("Robots.txt Built", TerminalColours.OK)

# Adds Google and Bing Auth Files
google_auth = os.getenv("GOOGLE_SEARCHCONSOLE_AUTH")
Expand All @@ -195,26 +167,12 @@ def main():
"""
Main function for static file build.
"""

# Gets working directory
working_dir = os.getcwd()

# Gets correct symbol for filepath
# '/' for Unix & '\' for Windows
split_symbol = "/"
if "\\" in working_dir:
split_symbol = "\\"

# Checks if build script is running in root of Git project
if working_dir.split(split_symbol)[-1] == "BTD6ParagonCalculator":
working_dir += f"{split_symbol}project"
print_terminal("Build Initiated", TerminalColours.BOLD)
print_terminal("Build Initiated", TerminalColours.BOLD)
try:
build_website()
else:
print_terminal(
"Please open build file from root directory of the Git project",
TerminalColours.WARN,
)
except Exception as e: # pylint: disable=broad-except
print_terminal(f"Error: {type(e).__name__} - {e}", TerminalColours.NO)
print_terminal("Build Failed", TerminalColours.NO)


if __name__ == "__main__":
Expand Down
3 changes: 1 addition & 2 deletions project/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,2 +1 @@
python-dotenv
flask
python-dotenv
28 changes: 0 additions & 28 deletions project/server.py

This file was deleted.

0 comments on commit 0684b93

Please sign in to comment.