79 changes: 79 additions & 0 deletions Dockerfile.snap
@@ -0,0 +1,79 @@
# Note to maintainers: after you make changes to this file, please run `make snap_image`.
# The script will give instructions to complete the update once it finishes. Be patient; it
# takes a long time to run.
#
# For help with the technical aspects of this Dockerfile, see
# https://snapcraft.io/docs/t/creating-docker-images-for-snapcraft/11739
# https://raw.githubusercontent.com/snapcore/snapcraft/master/docker/stable.Dockerfile
# and https://forum.snapcraft.io/. Note that the snapcraft forum does not appear to be indexed
# effectively (at all?) by Google.
#
# See https://docs.docker.com/develop/develop-images/dockerfile_best-practices/
# for guidance on the style of this Dockerfile
FROM ubuntu:focal as builder

RUN apt-get update && apt-get install --yes \
curl \
jq \
squashfs-tools

# Grab the core snap (for backwards compatibility) from the stable channel and unpack it in the proper place
RUN curl -L $(curl -H 'X-Ubuntu-Series: 16' 'https://api.snapcraft.io/api/v1/snaps/details/core' | jq '.download_url' -r) --output core.snap && \
mkdir -p /snap/core && \
unsquashfs -d /snap/core/current core.snap

# Grab the core18 snap (which snapcraft uses as a base) from the stable channel and unpack it in the proper place.
RUN curl -L $(curl -H 'X-Ubuntu-Series: 16' 'https://api.snapcraft.io/api/v1/snaps/details/core18' | jq '.download_url' -r) --output core18.snap && \
mkdir -p /snap/core18 && \
unsquashfs -d /snap/core18/current core18.snap

# Grab the core20 snap from the stable channel and unpack it in the proper place.
RUN curl -L $(curl -H 'X-Ubuntu-Series: 16' 'https://api.snapcraft.io/api/v1/snaps/details/core20' | jq '.download_url' -r) --output core20.snap && \
mkdir -p /snap/core20 && \
unsquashfs -d /snap/core20/current core20.snap

# Grab the snapcraft snap from the stable channel and unpack it in the proper place
RUN curl -L $(curl -H 'X-Ubuntu-Series: 16' 'https://api.snapcraft.io/api/v1/snaps/details/snapcraft?channel=stable' | jq '.download_url' -r) --output snapcraft.snap && \
mkdir -p /snap/snapcraft && \
unsquashfs -d /snap/snapcraft/current snapcraft.snap

# Create a snapcraft runner (TODO: move version detection to the core of snapcraft)
RUN mkdir -p /snap/bin && \
echo "#!/bin/sh" > /snap/bin/snapcraft && \
snap_version="$(awk '/^version:/{print $2}' /snap/snapcraft/current/meta/snap.yaml)" && \
echo "export SNAP_VERSION=\"$snap_version\"" >> /snap/bin/snapcraft && \
echo 'exec "$SNAP/usr/bin/python3" "$SNAP/bin/snapcraft" "$@"' >> /snap/bin/snapcraft && \
chmod +x /snap/bin/snapcraft
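# For reference, the generated /snap/bin/snapcraft wrapper ends up looking roughly like this
# (a sketch reconstructed from the echo lines above; the version string is taken from snap.yaml):
#   #!/bin/sh
#   export SNAP_VERSION="<version from snap.yaml>"
#   exec "$SNAP/usr/bin/python3" "$SNAP/bin/snapcraft" "$@"
# SNAP, SNAP_NAME and SNAP_ARCH are supplied by the ENV lines in the final stage below.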

# Grab the golang snap from the stable channel, unpack it in the proper place, and create a runner for it
RUN curl -L $(curl -H 'X-Ubuntu-Series: 16' 'https://api.snapcraft.io/api/v1/snaps/details/go?channel=1.16/stable' | jq '.download_url' -r) --output go.snap && \
mkdir -p /snap/go && \
unsquashfs -d /snap/go/current go.snap && \
cd /snap/bin && \
ln -s /snap/go/current/bin/go

FROM ubuntu:focal
COPY --from=builder /snap /snap

# Install Snap build-time dependencies & generate locale
ARG DEBIAN_FRONTEND=noninteractive
RUN apt-get clean && apt-get update && apt-get install --yes \
build-essential \
golang-go \
git \
locales \
snapd && \
locale-gen en_US.UTF-8 && \
rm -rf /var/lib/apt/lists/*

# Set the proper environment
ENV LANG en_US.UTF-8
ENV LANGUAGE en_US:en
ENV LC_ALL en_US.UTF-8
ENV PATH "/snap/bin:$PATH"
ENV SNAP /snap/snapcraft/current
ENV SNAP_NAME snapcraft
ENV SNAP_ARCH amd64

SHELL ["/bin/bash", "-c"]
ENTRYPOINT "snapcraft"
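# Example usage (a sketch; the image tag is an assumption — see _build_snap.sh in this change for
# the actual invocation, which mounts the repository at /build and runs snapcraft there):
#   docker build -f Dockerfile.snap -t doctl-snap-base .
#   docker run --rm -v "$PWD":/build -w /build doctl-snap-base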
67 changes: 67 additions & 0 deletions MHD Blockchain .sh
@@ -0,0 +1,67 @@
#!/bin/bash

echo "🚀 Starting MHD Blockchain Environment Setup for macOS..."

# Step 1: Install Homebrew if not installed
if ! command -v brew &>/dev/null; then
    echo "🔧 Installing Homebrew..."
    /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
fi

echo "✅ Homebrew is ready."

# Step 2: Install Node.js and npm
echo "🔧 Installing Node.js..."
brew install node

# Step 3: Install Python
echo "🔧 Installing Python 3..."
brew install python

# Step 4: Install Docker
echo "🔧 Installing Docker..."
brew install --cask docker

# Step 5: Install Visual Studio Code
echo "🔧 Installing VS Code..."
brew install --cask visual-studio-code

# Step 6: Install Git and GitHub CLI
echo "🔧 Installing Git and GitHub CLI..."
brew install git
brew install gh

# Step 7: Install Go (optional for node engine)
echo "🔧 Installing GoLang..."
brew install go

# Step 8: Install Rust (optional for performance-critical modules)
echo "🔧 Installing Rust..."
if curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y; then
    echo "✅ Rust installed successfully."
else
    echo "❌ Failed to install Rust. Please check the installation logs for more details."
    exit 1
fi

# Step 9: Install Hardhat
echo "🔧 Setting up Hardhat..."
mkdir -p MHD-Blockchain
cd MHD-Blockchain || exit 1
npm init -y
npm install --save-dev hardhat
npx hardhat --version

# Step 10: Create Hardhat Project
npx hardhat init

echo "✅ MHD Blockchain environment setup is complete!"
echo "📁 Project directory: MHD-Blockchain"
echo "💡 To start working, run: cd MHD-Blockchain && code ."

# Step 11: Add deployment script execution
echo "🔧 Executing deployment script..."
chmod +x /workspaces/MHD/MHD-Blockchain/MHD-Blockchain/MHD-Blockchain/deploy.sh
/workspaces/MHD/MHD-Blockchain/MHD-Blockchain/MHD-Blockchain/deploy.sh sepolia
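# (deploy.sh itself is not part of this change; as an assumption, a minimal version would simply
# forward the network argument to a Hardhat deploy task, e.g.:
#   npx hardhat run scripts/deploy.js --network "$1"
# so `deploy.sh sepolia` above would deploy against the Sepolia network.)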

echo "✅ Deployment script executed."
58 changes: 58 additions & 0 deletions Untitled-1.sh
@@ -0,0 +1,58 @@
#!/bin/bash

# Step 1: Update Homebrew
echo "🔧 Updating Homebrew..."
brew update

# Step 2: Install Node.js and npm
echo "🔧 Installing Node.js..."
brew install node

# Step 3: Install Python
echo "🔧 Installing Python 3..."
brew install python

# Step 4: Install Docker
echo "🔧 Installing Docker..."
brew install --cask docker

# Step 5: Install Visual Studio Code
echo "🔧 Installing VS Code..."
brew install --cask visual-studio-code

# Step 6: Install Git and GitHub CLI
echo "🔧 Installing Git and GitHub CLI..."
brew install git
brew install gh

# Step 7: Install Go (optional for node engine)
echo "🔧 Installing GoLang..."
brew install go

# Step 8: Install Rust (optional for performance-critical modules)
echo "🔧 Installing Rust..."
if ! command -v rustc &>/dev/null; then
    if curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y; then
        echo "✅ Rust installed successfully."
    else
        echo "❌ Failed to install Rust. Please check the installation logs for more details."
        exit 1
    fi
else
    echo "✅ Rust is already installed."
fi

# Step 9: Install Hardhat
echo "🔧 Setting up Hardhat..."
mkdir -p MHD-Blockchain
cd MHD-Blockchain || exit 1
npm init -y
npm install --save-dev hardhat
npx hardhat --version

# Step 10: Create Hardhat Project
npx hardhat init

echo "✅ MHD Blockchain environment setup is complete!"
echo "📁 Project directory: MHD-Blockchain"
echo "💡 To start working, run: cd MHD-Blockchain && code ."
95 changes: 95 additions & 0 deletions WEB_CONCURRENCY.sh
@@ -0,0 +1,95 @@
#!/usr/bin/env bash

# This script was created by the Python buildpack to automatically set the `WEB_CONCURRENCY`
# environment variable at dyno boot (if it's not already set), based on the available memory
# and number of CPU cores. The env var is then used by some Python web servers (such as
# gunicorn and uvicorn) to control the default number of server processes that they launch.
#
# The default `WEB_CONCURRENCY` value is calculated as the lowest of either:
# - `<number of dyno CPU cores> * 2 + 1`
# - `<dyno available RAM in MB> / 256` (to ensure each process has at least 256 MB RAM)
#
# Currently, on Heroku dynos this results in the following concurrency values:
# - Eco / Basic / Standard-1X: 2 (capped by the 512 MB available memory)
# - Standard-2X / Private-S / Shield-S: 4 (capped by the 1 GB available memory)
# - Performance-M / Private-M / Shield-M: 5 (based on the 2 CPU cores)
# - Performance-L / Private-L / Shield-L: 17 (based on the 8 CPU cores)
# - Performance-L-RAM / Private-L-RAM / Shield-L-RAM: 9 (based on the 4 CPU cores)
# - Performance-XL / Private-XL / Shield-XL: 17 (based on the 8 CPU cores)
# - Performance-2XL / Private-2XL / Shield-2XL: 33 (based on the 16 CPU cores)
#
# To override these default values, either set `WEB_CONCURRENCY` as an explicit config var
# on the app, or pass `--workers <num>` when invoking gunicorn/uvicorn in your Procfile.
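#
# For example, a Procfile entry like the following (the app module name is illustrative) pins the
# worker count regardless of dyno size:
#   web: gunicorn myapp.wsgi --workers 3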

# Note: Since this is a .profile.d/ script it will be sourced, meaning that we cannot enable
# exit on error, have to use return not exit, and returning non-zero doesn't have an effect.

function detect_memory_limit_in_mb() {
    local memory_limit_file='/sys/fs/cgroup/memory/memory.limit_in_bytes'

    # This memory limits file only exists on Heroku, or when using cgroups v1 (Docker < 20.10).
    if [[ -f "${memory_limit_file}" ]]; then
        local memory_limit_in_mb=$(($(cat "${memory_limit_file}") / 1048576))

        # Ignore values above 1TB RAM, since when using cgroups v1 the limits file reports a
        # bogus value of thousands of TB RAM when there is no container memory limit set.
        if ((memory_limit_in_mb <= 1048576)); then
            echo "${memory_limit_in_mb}"
            return 0
        fi
    fi

    return 1
}

function output() {
    # Only display log output for web dynos, to prevent breaking one-off dyno scripting use-cases,
    # and to prevent confusion from messages about WEB_CONCURRENCY in the logs of non-web workers.
    # (We still actually set the env vars for all dyno types for consistency and easier debugging.)
    if [[ "${DYNO:-}" == web.* ]]; then
        echo "Python buildpack: $*" >&2
    fi
}

if ! available_memory_in_mb=$(detect_memory_limit_in_mb); then
    # This should never occur on Heroku, but will be common for non-Heroku environments such as Dokku.
    output "Couldn't determine available memory. Skipping automatic configuration of WEB_CONCURRENCY."
    return 0
fi

if ! cpu_cores=$(nproc); then
    # This should never occur in practice, since this buildpack only supports being run on our base
    # images, and nproc is installed in all of them.
    output "Couldn't determine number of CPU cores. Skipping automatic configuration of WEB_CONCURRENCY."
    return 0
fi

output "Detected ${available_memory_in_mb} MB available memory and ${cpu_cores} CPU cores."

# This env var is undocumented and not consistent with what other buildpacks set, however,
# GitHub code search shows there are Python apps in the wild that do rely upon it.
export DYNO_RAM="${available_memory_in_mb}"

if [[ -v WEB_CONCURRENCY ]]; then
    output "Skipping automatic configuration of WEB_CONCURRENCY since it's already set."
    return 0
fi

minimum_memory_per_process_in_mb=256

# Prevents WEB_CONCURRENCY being set to zero if the environment is extremely memory constrained.
if ((available_memory_in_mb < minimum_memory_per_process_in_mb)); then
    max_concurrency_for_available_memory=1
else
    max_concurrency_for_available_memory=$((available_memory_in_mb / minimum_memory_per_process_in_mb))
fi

max_concurrency_for_cpu_cores=$((cpu_cores * 2 + 1))

if ((max_concurrency_for_available_memory < max_concurrency_for_cpu_cores)); then
    export WEB_CONCURRENCY="${max_concurrency_for_available_memory}"
    output "Defaulting WEB_CONCURRENCY to ${WEB_CONCURRENCY} based on the available memory."
else
    export WEB_CONCURRENCY="${max_concurrency_for_cpu_cores}"
    output "Defaulting WEB_CONCURRENCY to ${WEB_CONCURRENCY} based on the number of CPU cores."
fi
11 changes: 11 additions & 0 deletions _build_snap.sh
@@ -0,0 +1,11 @@
#!/usr/bin/env bash

set -euo pipefail

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../"

make clean

echo "building snap"
echo ""
cd "$DIR" && docker run --rm -v "$DIR":/build -w /build sammytheshark/doctl-snap-base