From 3f37719422b9f5dc7303d07a4b06b420041e4f61 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Wed, 29 Jan 2020 22:16:05 +0000 Subject: [PATCH 001/105] Trying out a 'hello world' Github action - Add trial workflow to use "hello world" action - Add structure and Docker dependency management - Remove standard libraries from requirements file - Fix permissions of src/helloworld.py for GH actions - Fix path to helloworld entrypoint in Dockerfile - Switch to RUN command in Dockerfile - Forward args from entrypoint to main script - Ensure all files are copied to Docker image - Dummy edit to try and bust GH action cache - Fix path to entrypoint in Dockerfile - What about this? - Andddd....this? - add debug statement to entrypoint - let's try being explicit - trying to align working directories - fix improper use of Docker `COPY` - fix entrypoint specification - debugging dockerfile - moar debuggings - guess pushd isn't available - simplify - complexify - debug - moar debug - rm debug --- .github/workflows/main.yml | 14 ++++++++++++++ Dockerfile | 17 +++++++++++++++++ action.yml | 15 +++++++++++++++ entrypoint.sh | 6 ++++++ requirements.txt | 1 + src/helloworld.py | 7 +++++++ 6 files changed, 60 insertions(+) create mode 100644 .github/workflows/main.yml create mode 100644 Dockerfile create mode 100644 action.yml create mode 100755 entrypoint.sh create mode 100644 requirements.txt create mode 100755 src/helloworld.py diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml new file mode 100644 index 0000000..ab6571e --- /dev/null +++ b/.github/workflows/main.yml @@ -0,0 +1,14 @@ +on: [push] + +jobs: + hello_world_job: + runs-on: ubuntu-latest + name: A job to say hello + steps: + - name: Hello world action step + id: hello + uses: dabrady/syndicate@master + with: + who-to-greet: 'Dan the Dev' + - name: Get the output time + run: echo "The time was ${{ steps.hello.outputs.time }}" diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..c9b453c 
--- /dev/null +++ b/Dockerfile @@ -0,0 +1,17 @@ +FROM python:3-alpine + +WORKDIR /action + +# Copy action metadata +COPY LICENSE README.md requirements.txt ./ +# Copy action code +COPY entrypoint.sh ./ +COPY src/ ./src/ + +# Install action requirements +RUN pip install --no-cache-dir -r ./requirements.txt + +# Hardcoding WORKDIR into ENTRYPOINT. +# Can't use environment variables in "exec" form of ENTRYPOINT, but "exec" form +# is recommended. +ENTRYPOINT [ "/action/entrypoint.sh" ] \ No newline at end of file diff --git a/action.yml b/action.yml new file mode 100644 index 0000000..2f1e328 --- /dev/null +++ b/action.yml @@ -0,0 +1,15 @@ +name: 'Hello World' +description: 'Greet someone and record the time' +inputs: + who-to-greet: # id of input + description: 'Who to greet' + required: true + default: 'world' +outputs: + time: # id of output + description: 'The time we greeted you' +runs: + using: 'docker' + image: 'Dockerfile' + args: + - ${{ inputs.who-to-greet }} diff --git a/entrypoint.sh b/entrypoint.sh new file mode 100755 index 0000000..6b4a151 --- /dev/null +++ b/entrypoint.sh @@ -0,0 +1,6 @@ +#!/usr/bin/env sh + +# TODO Can we programmatically match Dockerfile? 
+WORKDIR=/action +cd $WORKDIR +"$WORKDIR"/src/helloworld.py $@ diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..f229360 --- /dev/null +++ b/requirements.txt @@ -0,0 +1 @@ +requests diff --git a/src/helloworld.py b/src/helloworld.py new file mode 100755 index 0000000..8cf76c8 --- /dev/null +++ b/src/helloworld.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 + +import sys +from datetime import datetime + +print(f"Hello, {sys.argv[1]}!") +print(f"::set-output name=time::{datetime.now()}") From fce24ac7beeba5ceb907c10bbf1f5e70331a26ed Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Thu, 30 Jan 2020 11:57:08 +0000 Subject: [PATCH 002/105] Transform example action into 'syndicate' rename workflow --- .github/workflows/main.yml | 15 +++++++++------ action.yml | 14 +++++++------- entrypoint.sh | 2 +- src/{helloworld.py => syndicate.py} | 2 +- 4 files changed, 18 insertions(+), 15 deletions(-) rename src/{helloworld.py => syndicate.py} (64%) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index ab6571e..e8f2e63 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -1,14 +1,17 @@ on: [push] jobs: - hello_world_job: + syndicate: runs-on: ubuntu-latest - name: A job to say hello + name: Example syndicate workflow steps: - - name: Hello world action step - id: hello + - name: Syndicate published content changes + id: syndicate uses: dabrady/syndicate@master with: - who-to-greet: 'Dan the Dev' + silos: + - DEV + - Medium + - Mars - name: Get the output time - run: echo "The time was ${{ steps.hello.outputs.time }}" + run: echo "The time was ${{ steps.syndicate.outputs.time }}" diff --git a/action.yml b/action.yml index 2f1e328..e98ba10 100644 --- a/action.yml +++ b/action.yml @@ -1,15 +1,15 @@ -name: 'Hello World' -description: 'Greet someone and record the time' +name: 'Syndicate' +description: 'Publish content to other places' inputs: - who-to-greet: # id of input - description: 'Who to greet' + silos: 
# id of input + description: 'A list of names indicating the platforms to publish your content to.' required: true - default: 'world' + default: [] outputs: time: # id of output - description: 'The time we greeted you' + description: 'The time this action finished' runs: using: 'docker' image: 'Dockerfile' args: - - ${{ inputs.who-to-greet }} + - ${{ inputs.silos }} diff --git a/entrypoint.sh b/entrypoint.sh index 6b4a151..07e7daa 100755 --- a/entrypoint.sh +++ b/entrypoint.sh @@ -3,4 +3,4 @@ # TODO Can we programmatically match Dockerfile? WORKDIR=/action cd $WORKDIR -"$WORKDIR"/src/helloworld.py $@ +"$WORKDIR"/src/syndicate.py $@ diff --git a/src/helloworld.py b/src/syndicate.py similarity index 64% rename from src/helloworld.py rename to src/syndicate.py index 8cf76c8..8452947 100755 --- a/src/helloworld.py +++ b/src/syndicate.py @@ -3,5 +3,5 @@ import sys from datetime import datetime -print(f"Hello, {sys.argv[1]}!") +print(f"You want to publish to these places: {sys.argv[1:-1]}") print(f"::set-output name=time::{datetime.now()}") From 968ed21f620dfae46e05f717cdc9fbb728058998 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Thu, 30 Jan 2020 12:04:11 +0000 Subject: [PATCH 003/105] Using multiline string as workaround for input seq - update workflow info - fix python log - dummy commit: i don't think it worked peek at the available environment --- .github/workflows/main.yml | 12 +++++++----- action.yml | 2 +- src/syndicate.py | 7 ++++++- 3 files changed, 14 insertions(+), 7 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index e8f2e63..5f7d91e 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -5,13 +5,15 @@ jobs: runs-on: ubuntu-latest name: Example syndicate workflow steps: - - name: Syndicate published content changes + - name: Syndicate to silos id: syndicate uses: dabrady/syndicate@master with: - silos: - - DEV - - Medium - - Mars + # Using a YAML multiline string as a workaround for a list 
argument. + # @see https://github.community/t5/GitHub-Actions/Can-action-inputs-be-arrays/td-p/33776 + silos: | + DEV + Medium + Mars - name: Get the output time run: echo "The time was ${{ steps.syndicate.outputs.time }}" diff --git a/action.yml b/action.yml index e98ba10..27a3a61 100644 --- a/action.yml +++ b/action.yml @@ -4,7 +4,7 @@ inputs: silos: # id of input description: 'A list of names indicating the platforms to publish your content to.' required: true - default: [] + default: '' outputs: time: # id of output description: 'The time this action finished' diff --git a/src/syndicate.py b/src/syndicate.py index 8452947..d0801d1 100755 --- a/src/syndicate.py +++ b/src/syndicate.py @@ -1,7 +1,12 @@ #!/usr/bin/env python3 import sys +import os from datetime import datetime -print(f"You want to publish to these places: {sys.argv[1:-1]}") +print(f"You want to publish to these places? {sys.argv[1:]}") + +print("We have access to these environment variables:") +print(os.environ) + print(f"::set-output name=time::{datetime.now()}") From 9ad46054b238c4191f5afee933fe61d6a364e257 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Thu, 30 Jan 2020 12:44:59 +0000 Subject: [PATCH 004/105] Document silo API key usage in example workflow --- .github/workflows/main.yml | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 5f7d91e..3ecafd5 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -12,8 +12,19 @@ jobs: # Using a YAML multiline string as a workaround for a list argument. # @see https://github.community/t5/GitHub-Actions/Can-action-inputs-be-arrays/td-p/33776 silos: | - DEV - Medium - Mars + dev + medium + mars + env: + # Set necessary API keys as secrets of your repo and specify them here in this format: + # + # _API_KEY + # + # e.g. + # + # medium_API_KEY + # + # so that the action can find them easily when needed. 
+ dev_API_KEY: ${{ secrets.dev_API_KEY }} - name: Get the output time run: echo "The time was ${{ steps.syndicate.outputs.time }}" From 161fe939c38284b54175421b47850f83360e6579 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Thu, 30 Jan 2020 12:45:20 +0000 Subject: [PATCH 005/105] Check for available API keys apparently sysargs already get split by newline --- src/syndicate.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/syndicate.py b/src/syndicate.py index d0801d1..8e808cb 100755 --- a/src/syndicate.py +++ b/src/syndicate.py @@ -4,9 +4,11 @@ import os from datetime import datetime -print(f"You want to publish to these places? {sys.argv[1:]}") +silos = sys.argv[1:] +print(f"You want to publish to these places? {silos}") -print("We have access to these environment variables:") -print(os.environ) +print(f"Do we have the necessary API keys?") +available_keys = {silo:(f"{silo}_API_KEY" in os.environ) for silo in silos } +print(available_keys) print(f"::set-output name=time::{datetime.now()}") From 01210fafebe999c0c6642584ccce1ca14c5c8758 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Fri, 31 Jan 2020 13:09:57 +0000 Subject: [PATCH 006/105] Add better structure and move to Python entrypoint --- Dockerfile | 6 +++--- entrypoint.py | 9 +++++++++ entrypoint.sh | 6 ------ src/syndicate.py | 14 -------------- syndicate/__init__.py | 10 ++++++++++ 5 files changed, 22 insertions(+), 23 deletions(-) create mode 100755 entrypoint.py delete mode 100755 entrypoint.sh delete mode 100755 src/syndicate.py create mode 100755 syndicate/__init__.py diff --git a/Dockerfile b/Dockerfile index c9b453c..4512444 100644 --- a/Dockerfile +++ b/Dockerfile @@ -5,8 +5,8 @@ WORKDIR /action # Copy action metadata COPY LICENSE README.md requirements.txt ./ # Copy action code -COPY entrypoint.sh ./ -COPY src/ ./src/ +COPY entrypoint.py ./ +COPY syndicate/ ./syndicate/ # Install action requirements RUN pip install --no-cache-dir -r ./requirements.txt @@ -14,4 
+14,4 @@ RUN pip install --no-cache-dir -r ./requirements.txt # Hardcoding WORKDIR into ENTRYPOINT. # Can't use environment variables in "exec" form of ENTRYPOINT, but "exec" form # is recommended. -ENTRYPOINT [ "/action/entrypoint.sh" ] \ No newline at end of file +ENTRYPOINT [ "/action/entrypoint.py" ] \ No newline at end of file diff --git a/entrypoint.py b/entrypoint.py new file mode 100755 index 0000000..a0d6c8a --- /dev/null +++ b/entrypoint.py @@ -0,0 +1,9 @@ +#!/usr/bin/env python3 +import os +import sys + +ACTION_SOURCE='/action' +sys.path.insert(0, os.path.abspath(ACTION_SOURCE)) + +import syndicate +syndicate.elsewhere(sys.argv[1].splitlines()) diff --git a/entrypoint.sh b/entrypoint.sh deleted file mode 100755 index 07e7daa..0000000 --- a/entrypoint.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env sh - -# TODO Can we programmatically match Dockerfile? -WORKDIR=/action -cd $WORKDIR -"$WORKDIR"/src/syndicate.py $@ diff --git a/src/syndicate.py b/src/syndicate.py deleted file mode 100755 index 8e808cb..0000000 --- a/src/syndicate.py +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env python3 - -import sys -import os -from datetime import datetime - -silos = sys.argv[1:] -print(f"You want to publish to these places? {silos}") - -print(f"Do we have the necessary API keys?") -available_keys = {silo:(f"{silo}_API_KEY" in os.environ) for silo in silos } -print(available_keys) - -print(f"::set-output name=time::{datetime.now()}") diff --git a/syndicate/__init__.py b/syndicate/__init__.py new file mode 100755 index 0000000..15a5c57 --- /dev/null +++ b/syndicate/__init__.py @@ -0,0 +1,10 @@ +import sys +import os +from datetime import datetime + +def elsewhere(silos): + print(f"You want to publish to these places? 
{silos}") + print(f"Do we have the necessary API keys?") + available_keys = {silo:(f"{silo}_API_KEY" in os.environ) for silo in silos } + print(available_keys) + print(f"::set-output name=time::{datetime.now()}") From eca7b54c6f9e119cea8c916777dd09b9d9ded9aa Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Fri, 31 Jan 2020 14:03:26 +0000 Subject: [PATCH 007/105] Add dummy silo recognition --- syndicate/__init__.py | 9 ++++++++- syndicate/silos/dev.py | 1 + 2 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 syndicate/silos/dev.py diff --git a/syndicate/__init__.py b/syndicate/__init__.py index 15a5c57..98534ea 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -1,10 +1,17 @@ import sys import os from datetime import datetime +import importlib.util def elsewhere(silos): - print(f"You want to publish to these places? {silos}") + print(f"You want to publish to these places: {silos}") + + print("Do I know how?") + recognized_silos = {silo:bool(importlib.util.find_spec(f"syndicate.silos.{silo}")) for silo in silos} + print(recognized_silos) + print(f"Do we have the necessary API keys?") available_keys = {silo:(f"{silo}_API_KEY" in os.environ) for silo in silos } print(available_keys) + print(f"::set-output name=time::{datetime.now()}") diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py new file mode 100644 index 0000000..8e5e854 --- /dev/null +++ b/syndicate/silos/dev.py @@ -0,0 +1 @@ +print("Hello? 
Yes, this is DEV.") From 15207a448130aeaf08790c89a91f5767aece80f4 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Fri, 31 Jan 2020 14:10:01 +0000 Subject: [PATCH 008/105] Create Pythonic .gitignore --- .gitignore | 129 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 129 insertions(+) create mode 100644 .gitignore diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..b6e4761 --- /dev/null +++ b/.gitignore @@ -0,0 +1,129 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ From 2960ab96a8f437bd23350bf15a2e653aed4cbaff Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Fri, 31 Jan 2020 14:23:17 +0000 Subject: [PATCH 009/105] Compress silo locator logic into function --- syndicate/__init__.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/syndicate/__init__.py b/syndicate/__init__.py index 98534ea..a06e4a9 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -7,7 +7,7 @@ def elsewhere(silos): print(f"You want to publish to these places: {silos}") print("Do I know how?") - recognized_silos = {silo:bool(importlib.util.find_spec(f"syndicate.silos.{silo}")) for silo in silos} + recognized_silos = {silo:bool(_locate(silo)) for silo in silos} print(recognized_silos) print(f"Do we have the necessary API keys?") @@ -15,3 +15,7 @@ def elsewhere(silos): print(available_keys) print(f"::set-output name=time::{datetime.now()}") + +### privates ### +def _locate(silo): + return importlib.util.find_spec(f'syndicate.silos.{silo}') From 6e6278aa690aea3048bd7e97a9a70eebcb887a82 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Fri, 31 Jan 2020 16:16:47 +0000 Subject: [PATCH 010/105] Start fleshing out syndication driver logic --- syndicate/__init__.py | 26 ++++++++++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/syndicate/__init__.py b/syndicate/__init__.py index a06e4a9..9f7e837 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -7,15 +7,37 @@ def elsewhere(silos): print(f"You want to publish to these places: {silos}") print("Do I know how?") - 
recognized_silos = {silo:bool(_locate(silo)) for silo in silos} + specs = {silo:_locate(silo) for silo in silos} + recognized_silos = {silo:bool(spec) for (silo,spec) in specs.items()} print(recognized_silos) print(f"Do we have the necessary API keys?") - available_keys = {silo:(f"{silo}_API_KEY" in os.environ) for silo in silos } + available_keys = {silo:bool(_get_api_key(silo)) for (silo, known) in recognized_silos.items() if known } print(available_keys) + if any(available_keys.values()): + print("Let's do this thing.") + results = {silo:_load(spec, _get_api_key(silo)) for (silo,spec) in specs.items() if _has_api_key(silo)} + print(results) + else: + print("Sorry, can't do anything with that.") + print(f"::set-output name=time::{datetime.now()}") ### privates ### +_API_KEY = lambda s: f"{s}_API_KEY" + def _locate(silo): return importlib.util.find_spec(f'syndicate.silos.{silo}') + +def _load(silo_spec, api_key): + if silo_spec and api_key: + return importlib.import_module(silo_spec.name).do_the_thing(api_key) + else: + return None + +def _has_api_key(silo): + return _API_KEY(silo) in os.environ + +def _get_api_key(silo): + return os.getenv(_API_KEY(silo)) From e44e04bb2d34da9222b7da4c76d6334bb2f72e76 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Fri, 31 Jan 2020 16:26:04 +0000 Subject: [PATCH 011/105] Add some basic Github Action utilities --- syndicate/__init__.py | 23 +++++++++++++---------- syndicate/silos/dev.py | 7 ++++++- syndicate/utils.py | 24 ++++++++++++++++++++++++ 3 files changed, 43 insertions(+), 11 deletions(-) create mode 100644 syndicate/utils.py diff --git a/syndicate/__init__.py b/syndicate/__init__.py index 9f7e837..c3a5b32 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -1,28 +1,31 @@ +from datetime import datetime +from syndicate.utils import action_log, action_warn, action_output + import sys import os -from datetime import datetime import importlib.util + def elsewhere(silos): - print(f"You want to publish to these 
places: {silos}") + action_log(f"You want to publish to these places: {silos}") - print("Do I know how?") + action_log("Do I know how?") specs = {silo:_locate(silo) for silo in silos} recognized_silos = {silo:bool(spec) for (silo,spec) in specs.items()} - print(recognized_silos) + action_log(recognized_silos) - print(f"Do we have the necessary API keys?") + action_log(f"Do we have the necessary API keys?") available_keys = {silo:bool(_get_api_key(silo)) for (silo, known) in recognized_silos.items() if known } - print(available_keys) + action_log(available_keys) if any(available_keys.values()): - print("Let's do this thing.") + action_log("Let's do this thing.") results = {silo:_load(spec, _get_api_key(silo)) for (silo,spec) in specs.items() if _has_api_key(silo)} - print(results) + action_log(results) else: - print("Sorry, can't do anything with that.") + action_warn("Sorry, can't do anything with that.") - print(f"::set-output name=time::{datetime.now()}") + action_output("time", datetime.now()) ### privates ### _API_KEY = lambda s: f"{s}_API_KEY" diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 8e5e854..5fc0bcf 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -1 +1,6 @@ -print("Hello? Yes, this is DEV.") +from syndicate.utils import action_log_group, action_log + +@action_log_group("dev") +def do_the_thing(api_key): + action_log("Hello? 
Yes, this is DEV.") + return True diff --git a/syndicate/utils.py b/syndicate/utils.py new file mode 100644 index 0000000..50a2e03 --- /dev/null +++ b/syndicate/utils.py @@ -0,0 +1,24 @@ +### Github Action utilities ### +def action_log(msg): + print(msg) + +def action_debug(msg): + print(f"::debug::{msg}") + +def action_warn(msg): + print(f"::warning::{msg}") + +def action_error(msg): + print(f"::error::{msg}") + +def action_output(key, value): + print(f"::set-output name={key}::{value}") + +def action_log_group(title): + def _decorator(func): + def _wrapper(*args, **kwargs): + print(f"::group::{title}") + func(*args, **kwargs) + print("::endgroup::") + return _wrapper + return _decorator From 49ad44094259ef19dab2cf94dccff894c8380b1b Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Fri, 31 Jan 2020 16:34:58 +0000 Subject: [PATCH 012/105] Dummy commit to test "can't do that" scenario --- .github/workflows/main.yml | 1 - syndicate/__init__.py | 1 - 2 files changed, 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 3ecafd5..6a23192 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -12,7 +12,6 @@ jobs: # Using a YAML multiline string as a workaround for a list argument. # @see https://github.community/t5/GitHub-Actions/Can-action-inputs-be-arrays/td-p/33776 silos: | - dev medium mars env: diff --git a/syndicate/__init__.py b/syndicate/__init__.py index c3a5b32..62df936 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -5,7 +5,6 @@ import os import importlib.util - def elsewhere(silos): action_log(f"You want to publish to these places: {silos}") From fb87dfa89b370622f1abb6c70e8c506ef0ce8032 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Fri, 31 Jan 2020 16:39:03 +0000 Subject: [PATCH 013/105] Undo workflow mod: "can't do that" scenario works This reverts commit 33d593ba1313113d5a39542b4e027e44a935689a. 
--- .github/workflows/main.yml | 1 + syndicate/__init__.py | 1 + 2 files changed, 2 insertions(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 6a23192..3ecafd5 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -12,6 +12,7 @@ jobs: # Using a YAML multiline string as a workaround for a list argument. # @see https://github.community/t5/GitHub-Actions/Can-action-inputs-be-arrays/td-p/33776 silos: | + dev medium mars env: diff --git a/syndicate/__init__.py b/syndicate/__init__.py index 62df936..c3a5b32 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -5,6 +5,7 @@ import os import importlib.util + def elsewhere(silos): action_log(f"You want to publish to these places: {silos}") From 3f97a73b909b88ecfd20f6c3ac5f15febd54c879 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Fri, 31 Jan 2020 16:50:23 +0000 Subject: [PATCH 014/105] Compress syndication driver logging --- syndicate/__init__.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/syndicate/__init__.py b/syndicate/__init__.py index c3a5b32..aab9540 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -5,25 +5,24 @@ import os import importlib.util - def elsewhere(silos): action_log(f"You want to publish to these places: {silos}") - action_log("Do I know how?") specs = {silo:_locate(silo) for silo in silos} - recognized_silos = {silo:bool(spec) for (silo,spec) in specs.items()} - action_log(recognized_silos) + recognized_silos = {silo:spec for (silo,spec) in specs.items() if spec} + action_log(f"I know how to publish to these places: {list(recognized_silos.keys())}") + + available_keys = {silo:bool(_get_api_key(silo)) for silo in recognized_silos.keys()} - action_log(f"Do we have the necessary API keys?") - available_keys = {silo:bool(_get_api_key(silo)) for (silo, known) in recognized_silos.items() if known } - action_log(available_keys) + if not all(available_keys.values()): + action_log(f"But I 
don't have API keys for these places: {[silo for (silo, available) in available_keys.items() if not available]}") if any(available_keys.values()): - action_log("Let's do this thing.") + action_log("I'll do what I can.") results = {silo:_load(spec, _get_api_key(silo)) for (silo,spec) in specs.items() if _has_api_key(silo)} action_log(results) else: - action_warn("Sorry, can't do anything with that.") + action_warn("Sorry, can't help you.") action_output("time", datetime.now()) From ee2ae8707a8ff214a29f19edcd07146fb8e32bce Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Fri, 31 Jan 2020 16:51:45 +0000 Subject: [PATCH 015/105] bugfix(utils): Return decorated function results --- syndicate/utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/syndicate/utils.py b/syndicate/utils.py index 50a2e03..83cb90b 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -18,7 +18,8 @@ def action_log_group(title): def _decorator(func): def _wrapper(*args, **kwargs): print(f"::group::{title}") - func(*args, **kwargs) + result = func(*args, **kwargs) print("::endgroup::") + return result return _wrapper return _decorator From 30fbc16d30212cf3f94c9644370e724d1050077c Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Fri, 31 Jan 2020 19:11:37 +0000 Subject: [PATCH 016/105] Add 'fetch' API integration for DEV silo --- action.yml | 1 + syndicate/silos/__init__.py | 0 syndicate/silos/dev.py | 45 +++++++++++++++++++++++++++++++++++++ tests/requirements.txt | 2 ++ tests/test_dev.py | 28 +++++++++++++++++++++++ 5 files changed, 76 insertions(+) create mode 100644 syndicate/silos/__init__.py create mode 100644 tests/requirements.txt create mode 100644 tests/test_dev.py diff --git a/action.yml b/action.yml index 27a3a61..9b16cb5 100644 --- a/action.yml +++ b/action.yml @@ -6,6 +6,7 @@ inputs: required: true default: '' outputs: + # TODO Change this to a map of publish times keyed by silo time: # id of output description: 'The time this action finished' runs: diff 
--git a/syndicate/silos/__init__.py b/syndicate/silos/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 5fc0bcf..980f6ce 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -1,6 +1,51 @@ from syndicate.utils import action_log_group, action_log +import requests @action_log_group("dev") def do_the_thing(api_key): action_log("Hello? Yes, this is DEV.") + return True + +### privates ### + +## This is a simple semantic wrapper around the DEV API, currently in beta. + +def _fetch(api_key=None, post_id=None): + assert api_key, "missing API key" + + headers = { + 'api-key': api_key + } + if post_id: + # Fetch data for given post ID + ## NOTE Currently, there's no way to fetch data for a specific post. + ## The workaround I'm using here is the best we can do: fetch and search. + endpoint = "https://dev.to/api/articles/me/all" + post_data = None + page = 0 + while not post_data: + page += 1 + response = requests.get(endpoint, params={ 'page': page }, headers=headers) + response.raise_for_status() # raise error if bad request + posts = response.json() + if posts: + post_data = next((data for data in posts if data['id'] == post_id), None) + else: + break; # No more posts to fetch + return post_data + else: + # Fetch all post data + endpoint = "https://dev.to/api/articles/me/all" + response = requests.get(endpoint, headers=headers) + response.raise_for_status() # raise error if bad request + return response.json() + +def _draft(): + pass + +def _publish(): + pass + +def _update(): + pass diff --git a/tests/requirements.txt b/tests/requirements.txt new file mode 100644 index 0000000..4dfbdaa --- /dev/null +++ b/tests/requirements.txt @@ -0,0 +1,2 @@ +pytest +requests-mock diff --git a/tests/test_dev.py b/tests/test_dev.py new file mode 100644 index 0000000..bff9cc3 --- /dev/null +++ b/tests/test_dev.py @@ -0,0 +1,28 @@ +from syndicate.silos import dev +import requests_mock +import re + +def 
test_fetch_request_all_posts(requests_mock): + fake_results = [] + requests_mock.get("https://dev.to/api/articles/me/all", json=fake_results) + results = dev._fetch('fake_api_key') + assert results == fake_results + +def test_fetch_request_specific_post(requests_mock): + fake_post_id = 13 + requests_mock.get("https://dev.to/api/articles/me/all", json=[{'id':fake_post_id}]) + results = dev._fetch('fake_api_key', fake_post_id) + assert results['id'] == fake_post_id + + +def test_fetch_request_invalid_post(requests_mock): + invalid_post_id = 13 + def fake_results(req, con): + # Ugh, query string parsing. But they don't expose the params at the top-level, so.... + if int( re.search(r'page=(\d+)', req.query).group(1) ) == 1: + return [{"id": invalid_post_id + 1}] + else: + return [] + requests_mock.get("https://dev.to/api/articles/me/all", json=fake_results) + results = dev._fetch('fake_api_key', invalid_post_id) + assert results is None From 88083fd006012b49e0d40757c1e1fe9c891b2e50 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Fri, 31 Jan 2020 21:44:00 +0000 Subject: [PATCH 017/105] Add Github Python library as dependency --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index f229360..5130c34 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1 +1,2 @@ requests +github3.py From e1f15c8c1fcacdef2c4560827417a5f7de44a1de Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Fri, 31 Jan 2020 21:48:01 +0000 Subject: [PATCH 018/105] Use Github Python API to fetch target post data debug(Dockerfile): try to fix github3 build issues debug(Dockerfile): Maybe need to use Alpine? debug(Dockerfile): Maybe I dont need to use Alpine? 
debug(workflow): forward GITHUB_TOKEN debug(get_commit_payload): forgot to get repo first debug(syndicate): forgot to foward retrieved posts --- .github/workflows/main.yml | 1 + Dockerfile | 3 ++- syndicate/__init__.py | 24 ++++++++++++++++++++---- syndicate/silos/dev.py | 4 +++- syndicate/utils.py | 11 +++++++++++ 5 files changed, 37 insertions(+), 6 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 3ecafd5..f3381b6 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -16,6 +16,7 @@ jobs: medium mars env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Set necessary API keys as secrets of your repo and specify them here in this format: # # _API_KEY diff --git a/Dockerfile b/Dockerfile index 4512444..cd3756a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,5 @@ -FROM python:3-alpine +# Would like to use python:3-alpine, but it doesn't have 'gcc' and the github3 library needs that. +FROM python:3 WORKDIR /action diff --git a/syndicate/__init__.py b/syndicate/__init__.py index aab9540..e9907d8 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -1,6 +1,7 @@ from datetime import datetime -from syndicate.utils import action_log, action_warn, action_output +from syndicate.utils import action_log, action_warn, action_output, get_posts +import github3 import sys import os import importlib.util @@ -19,7 +20,12 @@ def elsewhere(silos): if any(available_keys.values()): action_log("I'll do what I can.") - results = {silo:_load(spec, _get_api_key(silo)) for (silo,spec) in specs.items() if _has_api_key(silo)} + + commit = _get_commit_payload() + assert commit, "could not fetch commit payload" + posts = get_posts(commit) + + results = {silo:_syndicate(spec, _get_api_key(silo), posts) for (silo,spec) in specs.items() if _has_api_key(silo)} action_log(results) else: action_warn("Sorry, can't help you.") @@ -32,9 +38,9 @@ def elsewhere(silos): def _locate(silo): return 
importlib.util.find_spec(f'syndicate.silos.{silo}') -def _load(silo_spec, api_key): +def _syndicate(silo_spec, api_key, posts): if silo_spec and api_key: - return importlib.import_module(silo_spec.name).do_the_thing(api_key) + return importlib.import_module(silo_spec.name).do_the_thing(posts, api_key) else: return None @@ -43,3 +49,13 @@ def _has_api_key(silo): def _get_api_key(silo): return os.getenv(_API_KEY(silo)) + +def _get_commit_payload(): + assert os.getenv("GITHUB_REPOSITORY"), "GITHUB_REPOSITORY not available" + assert os.getenv("GITHUB_TOKEN"), "GITHUB_TOKEN not available" + assert os.getenv("GITHUB_SHA"), "GITHUB_SHA not available" + + gh = github3.login(token=os.getenv("GITHUB_TOKEN")) + repo = gh.repository(*os.getenv("GITHUB_REPOSITORY").split('/')) + commit = repo.commit(os.getenv("GITHUB_SHA")) + return commit diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 980f6ce..9ef5ac4 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -2,8 +2,10 @@ import requests @action_log_group("dev") -def do_the_thing(api_key): +def do_the_thing(posts, api_key): action_log("Hello? 
Yes, this is DEV.") + action_log("You want to syndicate these posts:") + action_log(posts) return True diff --git a/syndicate/utils.py b/syndicate/utils.py index 83cb90b..2a19718 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -23,3 +23,14 @@ def _wrapper(*args, **kwargs): return result return _wrapper return _decorator + +def get_posts(commit=None, post_dir='pages/posts'): + assert commit, 'missing commit payload' + return { + file['filename']:file['status'] + for file in commit.files + if ( + file['filename'].startswith(post_dir) and + file['status'] in ('created', 'modified') + ) + } From b40e1c16cad08ed9abc91cacb6142d642a81304f Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Fri, 31 Jan 2020 22:30:32 +0000 Subject: [PATCH 019/105] bugfix(get_posts): wrong status for file creation --- syndicate/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/syndicate/utils.py b/syndicate/utils.py index 2a19718..9cf9ea3 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -31,6 +31,6 @@ def get_posts(commit=None, post_dir='pages/posts'): for file in commit.files if ( file['filename'].startswith(post_dir) and - file['status'] in ('created', 'modified') + file['status'] in ('added', 'modified') ) } From 4fb849ef5b6bc9d16cc31a97eca6c793940965ff Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Fri, 31 Jan 2020 22:45:57 +0000 Subject: [PATCH 020/105] Restructure post collector get posts earlier dummy post creation --- pages/posts/dummy.md | 4 ++++ syndicate/__init__.py | 10 ++++++---- syndicate/utils.py | 9 +++------ 3 files changed, 13 insertions(+), 10 deletions(-) create mode 100644 pages/posts/dummy.md diff --git a/pages/posts/dummy.md b/pages/posts/dummy.md new file mode 100644 index 0000000..2955855 --- /dev/null +++ b/pages/posts/dummy.md @@ -0,0 +1,4 @@ +--- +title: 'dummy post' +tags: anonymous, random +--- diff --git a/syndicate/__init__.py b/syndicate/__init__.py index e9907d8..d7fd290 100755 --- a/syndicate/__init__.py +++ 
b/syndicate/__init__.py @@ -7,6 +7,11 @@ import importlib.util def elsewhere(silos): + commit = _get_commit_payload() + assert commit, "could not fetch commit payload" + posts = get_posts(commit) + assert posts, "no posts to update" + action_log(f"You want to publish to these places: {silos}") specs = {silo:_locate(silo) for silo in silos} @@ -21,11 +26,8 @@ def elsewhere(silos): if any(available_keys.values()): action_log("I'll do what I can.") - commit = _get_commit_payload() - assert commit, "could not fetch commit payload" - posts = get_posts(commit) - results = {silo:_syndicate(spec, _get_api_key(silo), posts) for (silo,spec) in specs.items() if _has_api_key(silo)} + action_log(results) else: action_warn("Sorry, can't help you.") diff --git a/syndicate/utils.py b/syndicate/utils.py index 9cf9ea3..4b20116 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -26,11 +26,8 @@ def _wrapper(*args, **kwargs): def get_posts(commit=None, post_dir='pages/posts'): assert commit, 'missing commit payload' + posts = [file for file in commit.files if file['filename'].startswith(post_dir)] return { - file['filename']:file['status'] - for file in commit.files - if ( - file['filename'].startswith(post_dir) and - file['status'] in ('added', 'modified') - ) + 'added': [post['filename'] for post in posts if post['status'] == 'added'], + 'modified': [post['filename'] for post in posts if post['status'] == 'modified'] } From a34aac205b9a9f799665f0ee47d9ecb795819588 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Fri, 31 Jan 2020 23:53:12 +0000 Subject: [PATCH 021/105] Add python-frontmatter as dependency --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index 5130c34..ce601f7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1,3 @@ requests github3.py +python-frontmatter From a618dacad6009f0311d18498f7fb62be65c1c9c1 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Fri, 31 Jan 2020 23:56:55 +0000 Subject: 
[PATCH 022/105] Move commit payload fetcher to utils --- syndicate/__init__.py | 15 +-------------- syndicate/utils.py | 25 ++++++++++++++++++++++--- 2 files changed, 23 insertions(+), 17 deletions(-) diff --git a/syndicate/__init__.py b/syndicate/__init__.py index d7fd290..d7abdf8 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -1,15 +1,12 @@ from datetime import datetime from syndicate.utils import action_log, action_warn, action_output, get_posts -import github3 import sys import os import importlib.util def elsewhere(silos): - commit = _get_commit_payload() - assert commit, "could not fetch commit payload" - posts = get_posts(commit) + posts = get_posts() assert posts, "no posts to update" action_log(f"You want to publish to these places: {silos}") @@ -51,13 +48,3 @@ def _has_api_key(silo): def _get_api_key(silo): return os.getenv(_API_KEY(silo)) - -def _get_commit_payload(): - assert os.getenv("GITHUB_REPOSITORY"), "GITHUB_REPOSITORY not available" - assert os.getenv("GITHUB_TOKEN"), "GITHUB_TOKEN not available" - assert os.getenv("GITHUB_SHA"), "GITHUB_SHA not available" - - gh = github3.login(token=os.getenv("GITHUB_TOKEN")) - repo = gh.repository(*os.getenv("GITHUB_REPOSITORY").split('/')) - commit = repo.commit(os.getenv("GITHUB_SHA")) - return commit diff --git a/syndicate/utils.py b/syndicate/utils.py index 4b20116..127dab9 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -1,3 +1,6 @@ +import github3 +import os + ### Github Action utilities ### def action_log(msg): print(msg) @@ -24,9 +27,25 @@ def _wrapper(*args, **kwargs): return _wrapper return _decorator -def get_posts(commit=None, post_dir='pages/posts'): - assert commit, 'missing commit payload' - posts = [file for file in commit.files if file['filename'].startswith(post_dir)] +def get_commit_payload(): + assert os.getenv("GITHUB_REPOSITORY"), "GITHUB_REPOSITORY not available" + assert os.getenv("GITHUB_TOKEN"), "GITHUB_TOKEN not available" + assert 
os.getenv("GITHUB_SHA"), "GITHUB_SHA not available" + + gh = github3.login(token=os.getenv("GITHUB_TOKEN")) + repo = gh.repository(*os.getenv("GITHUB_REPOSITORY").split('/')) + commit = repo.commit(os.getenv("GITHUB_SHA")) + return commit + +def get_posts(post_dir='pages/posts'): + commit = get_commit_payload() + assert commit, "could not fetch commit payload" + + files = [file for file in commit.files if file['filename'].startswith(post_dir)] + + # TODO + posts = [] + return { 'added': [post['filename'] for post in posts if post['status'] == 'added'], 'modified': [post['filename'] for post in posts if post['status'] == 'modified'] From edacdea5b26172ed7e742e9a6d8e297fc720aa38 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 00:05:39 +0000 Subject: [PATCH 023/105] Separate Github handle creation retrieve post contents prep draft payload --- pages/posts/dummy.md | 2 +- syndicate/silos/dev.py | 27 ++++++++++++++++++++++++--- syndicate/utils.py | 29 ++++++++++++++++++----------- 3 files changed, 43 insertions(+), 15 deletions(-) diff --git a/pages/posts/dummy.md b/pages/posts/dummy.md index 2955855..4dbd1a1 100644 --- a/pages/posts/dummy.md +++ b/pages/posts/dummy.md @@ -1,4 +1,4 @@ --- title: 'dummy post' -tags: anonymous, random +tags: anonymous --- diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 9ef5ac4..531d15c 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -1,4 +1,5 @@ -from syndicate.utils import action_log_group, action_log +from syndicate.utils import action_log_group, action_log, get_canonical_url +import frontmatter as frontmatter import requests @action_log_group("dev") @@ -43,8 +44,28 @@ def _fetch(api_key=None, post_id=None): response.raise_for_status() # raise error if bad request return response.json() -def _draft(): - pass +def _draft(post, api_key=None): + assert api_key, "missing API key" + assert post, "missing post" + + raw_contents = post.decoded.decode('utf-8') + front, _ = 
frontmatter.parse(raw_contents) + assert front.get('title'), "can't draft an article without a title" + + payload = { + 'article': { + 'title': front['title'], + 'published': False, + 'tags': front.get('tags', []), + 'series': front.get('series', None), + 'canonical_url': get_canonical_url(post.path), + 'body_markdown': raw_contents + } + } + + action_log("Drafting a post with this payload:") + action_log(payload) + # endpoint = "https://dev.to/api/articles" def _publish(): pass diff --git a/syndicate/utils.py b/syndicate/utils.py index 127dab9..c5ac3f6 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -1,3 +1,4 @@ +import functools import github3 import os @@ -27,26 +28,32 @@ def _wrapper(*args, **kwargs): return _wrapper return _decorator +# Memoize authentication +@functools.lru_cache(maxsize=1) +def github(): + assert os.getenv("GITHUB_TOKEN"), "GITHUB_TOKEN not available" + return github3.login(token=os.getenv("GITHUB_TOKEN")) + def get_commit_payload(): assert os.getenv("GITHUB_REPOSITORY"), "GITHUB_REPOSITORY not available" - assert os.getenv("GITHUB_TOKEN"), "GITHUB_TOKEN not available" assert os.getenv("GITHUB_SHA"), "GITHUB_SHA not available" - gh = github3.login(token=os.getenv("GITHUB_TOKEN")) - repo = gh.repository(*os.getenv("GITHUB_REPOSITORY").split('/')) + repo = github().repository(*os.getenv("GITHUB_REPOSITORY").split('/')) commit = repo.commit(os.getenv("GITHUB_SHA")) - return commit + return (repo, commit) def get_posts(post_dir='pages/posts'): - commit = get_commit_payload() + repo, commit = get_commit_payload() assert commit, "could not fetch commit payload" - files = [file for file in commit.files if file['filename'].startswith(post_dir)] - - # TODO - posts = [] + posts = [file for file in commit.files if file['filename'].startswith(post_dir)] + post_contents = {post['status']:repo.file_contents(post['filename'], commit.sha) for post in posts} return { - 'added': [post['filename'] for post in posts if post['status'] == 'added'], - 
'modified': [post['filename'] for post in posts if post['status'] == 'modified'] + 'added': [contents for (status, contents) in post_contents.items() if status == 'added'], + 'modified': [contents for (status, contents) in post_contents.items() if status == 'modified'] } + +def get_canonical_url(post_path): + assert os.getenv("GITHUB_REPOSITORY"), "GITHUB_REPOSITORY not available" + return f"https://github.com/{os.getenv('GITHUB_REPOSITORY')}/{post_path}" From 49d7a062df17d4f7a93eef02d94a749245147cb4 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 00:49:46 +0000 Subject: [PATCH 024/105] Drafting works test dummy post creation use blob url as canonical URL fix tag parsing dummy edit to test workflow dummy create to test workflow test draft creation dummy edit for testing draft creation try without json remove frontmatter from body markdown in payload try json again, but changing the Content-Type header handle yaml sequences better this is apparently necessary, otherwise I can't import `syndicate` add test for DEV fetch 'error when API key missing' re-arrange DEV _fetch parameters --- pages/posts/another_dummy.md | 11 ++++++++ pages/posts/best_dummy.md | 14 ++++++++++ pages/posts/yet_another_dummy.md | 11 ++++++++ syndicate/silos/dev.py | 48 +++++++++++++++++++++----------- syndicate/utils.py | 16 +++++++++-- tests/__init__.py | 0 tests/test_dev.py | 11 ++++++-- 7 files changed, 89 insertions(+), 22 deletions(-) create mode 100644 pages/posts/another_dummy.md create mode 100644 pages/posts/best_dummy.md create mode 100644 pages/posts/yet_another_dummy.md create mode 100644 tests/__init__.py diff --git a/pages/posts/another_dummy.md b/pages/posts/another_dummy.md new file mode 100644 index 0000000..da99804 --- /dev/null +++ b/pages/posts/another_dummy.md @@ -0,0 +1,11 @@ +--- +title: "another dummy post" +tags: thinkdeep, discuss,magic +series: +--- + +There could be anything in a box. + +It could be a boat! + +Always take the box. 
diff --git a/pages/posts/best_dummy.md b/pages/posts/best_dummy.md new file mode 100644 index 0000000..56fdec3 --- /dev/null +++ b/pages/posts/best_dummy.md @@ -0,0 +1,14 @@ +--- +title: Best dummy post evar +tags: + - thinkdeep + - discuss + - magic +series: +--- + +There could be anything in a box. + +It could be a boat! + +Always take the box. diff --git a/pages/posts/yet_another_dummy.md b/pages/posts/yet_another_dummy.md new file mode 100644 index 0000000..da99804 --- /dev/null +++ b/pages/posts/yet_another_dummy.md @@ -0,0 +1,11 @@ +--- +title: "another dummy post" +tags: thinkdeep, discuss,magic +series: +--- + +There could be anything in a box. + +It could be a boat! + +Always take the box. diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 531d15c..2b1a9d4 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -1,4 +1,4 @@ -from syndicate.utils import action_log_group, action_log, get_canonical_url +from syndicate.utils import action_log_group, action_log, get_canonical_url, yaml_sequence import frontmatter as frontmatter import requests @@ -8,13 +8,18 @@ def do_the_thing(posts, api_key): action_log("You want to syndicate these posts:") action_log(posts) + for post in posts['added']: + results = _draft(post, api_key) + action_log("Draft success!") + action_log(results) + return True ### privates ### ## This is a simple semantic wrapper around the DEV API, currently in beta. 
-def _fetch(api_key=None, post_id=None): +def _fetch(post_id=None, api_key=None): assert api_key, "missing API key" headers = { @@ -48,27 +53,36 @@ def _draft(post, api_key=None): assert api_key, "missing API key" assert post, "missing post" - raw_contents = post.decoded.decode('utf-8') - front, _ = frontmatter.parse(raw_contents) - assert front.get('title'), "can't draft an article without a title" - - payload = { - 'article': { - 'title': front['title'], - 'published': False, - 'tags': front.get('tags', []), - 'series': front.get('series', None), - 'canonical_url': get_canonical_url(post.path), - 'body_markdown': raw_contents - } - } + payload = _payload_for(post) action_log("Drafting a post with this payload:") action_log(payload) - # endpoint = "https://dev.to/api/articles" + endpoint = "https://dev.to/api/articles" + headers = { + 'api-key': api_key + } + response = requests.post(endpoint, headers=headers, json=payload) + response.raise_for_status() + return response.json() def _publish(): pass def _update(): pass + +def _payload_for(post): + raw_contents = post.decoded.decode('utf-8') + front, body = frontmatter.parse(raw_contents) + assert front.get('title'), "article is missing a title" + + return { + 'article': { + 'title': front['title'], + 'published': False, + 'tags': yaml_sequence(front.get('tags', None)), + 'series': front.get('series', None), + 'canonical_url': get_canonical_url(post), + 'body_markdown': body + } + } diff --git a/syndicate/utils.py b/syndicate/utils.py index c5ac3f6..cd915f3 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -54,6 +54,18 @@ def get_posts(post_dir='pages/posts'): 'modified': [contents for (status, contents) in post_contents.items() if status == 'modified'] } -def get_canonical_url(post_path): +def get_canonical_url(post): assert os.getenv("GITHUB_REPOSITORY"), "GITHUB_REPOSITORY not available" - return f"https://github.com/{os.getenv('GITHUB_REPOSITORY')}/{post_path}" + # return 
f"https://github.com/{os.getenv('GITHUB_REPOSITORY')}/{post.path}" + return post.html_url + +def yaml_sequence(sequence): + JUST_GIVE_IT_BACK = lambda s: s + cases = { + # Support simple comma-separated YAML sequences + type(''): lambda s: [item.strip() for item in sequence.split(',')], + # If the YAML sequence has already been processed into a list, just give it back + type([]): JUST_GIVE_IT_BACK + } + # If I know how to handle it, handle it; otherwise, just give it back + return cases.get(type(sequence), JUST_GIVE_IT_BACK)(sequence) diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_dev.py b/tests/test_dev.py index bff9cc3..e172b7a 100644 --- a/tests/test_dev.py +++ b/tests/test_dev.py @@ -1,17 +1,22 @@ from syndicate.silos import dev +import pytest import requests_mock import re +def test_fetch_error_when_api_key_missing(): + with pytest.raises(AssertionError): + dev._fetch() + def test_fetch_request_all_posts(requests_mock): fake_results = [] requests_mock.get("https://dev.to/api/articles/me/all", json=fake_results) - results = dev._fetch('fake_api_key') + results = dev._fetch(api_key='fake_api_key') assert results == fake_results def test_fetch_request_specific_post(requests_mock): fake_post_id = 13 requests_mock.get("https://dev.to/api/articles/me/all", json=[{'id':fake_post_id}]) - results = dev._fetch('fake_api_key', fake_post_id) + results = dev._fetch(fake_post_id, api_key='fake_api_key') assert results['id'] == fake_post_id @@ -24,5 +29,5 @@ def fake_results(req, con): else: return [] requests_mock.get("https://dev.to/api/articles/me/all", json=fake_results) - results = dev._fetch('fake_api_key', invalid_post_id) + results = dev._fetch(invalid_post_id, api_key='fake_api_key') assert results is None From ddea936a68d7a97870649e73ccdb9fd20ea9467b Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 12:06:40 +0000 Subject: [PATCH 025/105] Add tests for DEV draft ensure post has 
frontmatter before accessing it compress 'github' into just 'repo' compress commit payload logic --- pages/posts/best_dummy.md | 1 + syndicate/silos/dev.py | 2 ++ syndicate/utils.py | 23 +++++++++++++---------- tests/mocks.py | 16 ++++++++++++++++ tests/test_dev.py | 23 +++++++++++++++++++++++ 5 files changed, 55 insertions(+), 10 deletions(-) create mode 100644 tests/mocks.py diff --git a/pages/posts/best_dummy.md b/pages/posts/best_dummy.md index 56fdec3..0eabaf6 100644 --- a/pages/posts/best_dummy.md +++ b/pages/posts/best_dummy.md @@ -4,6 +4,7 @@ tags: - thinkdeep - discuss - magic + - foon series: --- diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 2b1a9d4..21f73a2 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -73,6 +73,8 @@ def _update(): def _payload_for(post): raw_contents = post.decoded.decode('utf-8') + assert frontmatter.checks(raw_contents) + front, body = frontmatter.parse(raw_contents) assert front.get('title'), "article is missing a title" diff --git a/syndicate/utils.py b/syndicate/utils.py index cd915f3..bbe8943 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -30,24 +30,27 @@ def _wrapper(*args, **kwargs): # Memoize authentication @functools.lru_cache(maxsize=1) -def github(): +def repo(): assert os.getenv("GITHUB_TOKEN"), "GITHUB_TOKEN not available" - return github3.login(token=os.getenv("GITHUB_TOKEN")) + assert os.getenv("GITHUB_REPOSITORY"), "GITHUB_REPOSITORY not available" + + gh = github3.login(token=os.getenv("GITHUB_TOKEN")) + return gh.repository(*os.getenv("GITHUB_REPOSITORY").split('/')) def get_commit_payload(): - assert os.getenv("GITHUB_REPOSITORY"), "GITHUB_REPOSITORY not available" assert os.getenv("GITHUB_SHA"), "GITHUB_SHA not available" + return repo().commit(os.getenv("GITHUB_SHA")).files - repo = github().repository(*os.getenv("GITHUB_REPOSITORY").split('/')) - commit = repo.commit(os.getenv("GITHUB_SHA")) - return (repo, commit) +def file_contents(filename): + assert 
os.getenv("GITHUB_SHA"), "GITHUB_SHA not available" + return repo().file_contents(filename, os.getenv("GITHUB_SHA")) def get_posts(post_dir='pages/posts'): - repo, commit = get_commit_payload() - assert commit, "could not fetch commit payload" + files = get_commit_payload() + assert files, "could not fetch commit payload" - posts = [file for file in commit.files if file['filename'].startswith(post_dir)] - post_contents = {post['status']:repo.file_contents(post['filename'], commit.sha) for post in posts} + posts = [file for file in files if file['filename'].startswith(post_dir)] + post_contents = {post['status']:file_contents(post['filename']) for post in posts} return { 'added': [contents for (status, contents) in post_contents.items() if status == 'added'], diff --git a/tests/mocks.py b/tests/mocks.py new file mode 100644 index 0000000..148bdf9 --- /dev/null +++ b/tests/mocks.py @@ -0,0 +1,16 @@ +import frontmatter +import textwrap + +class MockPost: + def __init__(self): + self.raw_contents = textwrap.dedent( + """ + --- + title: A beautiful mock + tags: beauty, fake + --- + What is a body? 
+ """) + self.front, _ = frontmatter.parse(self.raw_contents) + self.decoded = self.raw_contents.encode('utf-8') + self.html_url = 'https://silo.com/a-beautiful-mock' diff --git a/tests/test_dev.py b/tests/test_dev.py index e172b7a..c2ebc92 100644 --- a/tests/test_dev.py +++ b/tests/test_dev.py @@ -1,5 +1,7 @@ from syndicate.silos import dev +from .mocks import MockPost import pytest +import requests import requests_mock import re @@ -31,3 +33,24 @@ def fake_results(req, con): requests_mock.get("https://dev.to/api/articles/me/all", json=fake_results) results = dev._fetch(invalid_post_id, api_key='fake_api_key') assert results is None + +def test_draft_error_when_api_key_missing(): + with pytest.raises(AssertionError): + dev._draft('asdf') + +def test_draft_error_when_post_missing(): + with pytest.raises(AssertionError): + dev._draft(None) + +def test_draft_error_when_request_fails(requests_mock, monkeypatch): + monkeypatch.setenv('GITHUB_REPOSITORY', 'herp/derp') + requests_mock.post("https://dev.to/api/articles", status_code=422) + post = MockPost() + with pytest.raises(requests.exceptions.HTTPError): + dev._draft(post, api_key='fake_api_key') + +def test_draft_returns_something_on_success(requests_mock, monkeypatch): + fake_results = { 'type_of': 'article', 'id': 42 } + requests_mock.post("https://dev.to/api/articles", status_code=200, json=fake_results) + monkeypatch.setenv('GITHUB_REPOSITORY', 'herp/derp') + assert dev._draft(MockPost(), api_key='fake_api_key') From 385a0e25fb265c645beda46971bcc58a58c3661b Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 12:58:57 +0000 Subject: [PATCH 026/105] Point example workflow at development action --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index f3381b6..69bee14 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -7,7 +7,7 @@ jobs: steps: - name: Syndicate to silos id: 
syndicate - uses: dabrady/syndicate@master + uses: dabrady/syndicate@develop with: # Using a YAML multiline string as a workaround for a list argument. # @see https://github.community/t5/GitHub-Actions/Can-action-inputs-be-arrays/td-p/33776 From 23b84f582b561e9b1effb57f14d18e25e44a39eb Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 12:46:11 +0000 Subject: [PATCH 027/105] Update example workflow: trigger by post change --- .github/workflows/main.yml | 8 +++++++- pages/posts/best_dummy.md | 1 - 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 69bee14..9ed81df 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -1,4 +1,10 @@ -on: [push] +on: + push: + branches: + - master + - develop + paths: + - 'pages/**/*.mdx?' jobs: syndicate: diff --git a/pages/posts/best_dummy.md b/pages/posts/best_dummy.md index 0eabaf6..56fdec3 100644 --- a/pages/posts/best_dummy.md +++ b/pages/posts/best_dummy.md @@ -4,7 +4,6 @@ tags: - thinkdeep - discuss - magic - - foon series: --- From a3a10e5a0783c5efa2d354e5ffeef5105575427e Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 15:44:37 +0000 Subject: [PATCH 028/105] Add step to inject post ID into front and push it --- syndicate/silos/dev.py | 8 ++++++-- syndicate/utils.py | 15 +++++++++++++++ 2 files changed, 21 insertions(+), 2 deletions(-) diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 21f73a2..804a50e 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -1,4 +1,4 @@ -from syndicate.utils import action_log_group, action_log, get_canonical_url, yaml_sequence +from syndicate.utils import action_log_group, action_log, get_canonical_url, yaml_sequence, commit_silo_id import frontmatter as frontmatter import requests @@ -63,7 +63,11 @@ def _draft(post, api_key=None): } response = requests.post(endpoint, headers=headers, json=payload) response.raise_for_status() - return 
response.json() + + results = response.json() + assert results['id'] + commit_silo_id(post, results['id'], silo='dev') + return results def _publish(): pass diff --git a/syndicate/utils.py b/syndicate/utils.py index bbe8943..8c9b69a 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -72,3 +72,18 @@ def yaml_sequence(sequence): } # If I know how to handle it, handle it; otherwise, just give it back return cases.get(type(sequence), JUST_GIVE_IT_BACK)(sequence) + +def commit_silo_id(post, post_id, silo=None): + assert post, "missing post info" + assert post_id, "missing post ID" + assert silo, "silo not specified" + + fronted_post = frontmatter.loads(post.decoded.decode('utf-8')) + fronted_post[f'{silo}-id'] = post_id + + action_log(f"Updating frontmatter with ID for {silo}") + pushed_change = post.update( + f'syndicate({silo}): adding post ID to frontmatter', + frontmatter.dumps(fronted_post) + ) + action_log(pushed_change) From b159142307a2d11919922b162b95b669a53dd675 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 15:53:45 +0000 Subject: [PATCH 029/105] Default to empty tags, not 'None' tags --- syndicate/silos/dev.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 804a50e..52e6ef9 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -86,7 +86,7 @@ def _payload_for(post): 'article': { 'title': front['title'], 'published': False, - 'tags': yaml_sequence(front.get('tags', None)), + 'tags': yaml_sequence(front.get('tags', [])), 'series': front.get('series', None), 'canonical_url': get_canonical_url(post), 'body_markdown': body From 05ff010903e3a0fa1211ecbca5d1063c06306127 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 16:02:38 +0000 Subject: [PATCH 030/105] Add draft failure logging --- syndicate/silos/dev.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 
52e6ef9..1734429 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -1,4 +1,4 @@ -from syndicate.utils import action_log_group, action_log, get_canonical_url, yaml_sequence, commit_silo_id +from syndicate.utils import action_log_group, action_log, action_error, get_canonical_url, yaml_sequence, commit_silo_id import frontmatter as frontmatter import requests @@ -62,7 +62,11 @@ def _draft(post, api_key=None): 'api-key': api_key } response = requests.post(endpoint, headers=headers, json=payload) - response.raise_for_status() + + if response.status_code != requests.codes.created: + action_error("Failed to create draft!") + action_error(response.json()) + return None results = response.json() assert results['id'] From 55218f93eb53c17bae1791ef3b038bbcc1e1dc97 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 16:13:31 +0000 Subject: [PATCH 031/105] Report failures more accurately missed an import encode post contents in update oneliner for debugging dumbbbbbbbb post delete dummy posts dummy post to trigger workflow syndicate(dev): adding post ID to frontmatter --- pages/posts/another_dummy.md | 11 ----------- pages/posts/best_dummy.md | 14 -------------- pages/posts/dumb.md | 6 ++++++ pages/posts/dummy.md | 4 ---- pages/posts/yet_another_dummy.md | 11 ----------- syndicate/silos/dev.py | 17 +++++++++++------ syndicate/utils.py | 6 ++---- 7 files changed, 19 insertions(+), 50 deletions(-) delete mode 100644 pages/posts/another_dummy.md delete mode 100644 pages/posts/best_dummy.md create mode 100644 pages/posts/dumb.md delete mode 100644 pages/posts/dummy.md delete mode 100644 pages/posts/yet_another_dummy.md diff --git a/pages/posts/another_dummy.md b/pages/posts/another_dummy.md deleted file mode 100644 index da99804..0000000 --- a/pages/posts/another_dummy.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -title: "another dummy post" -tags: thinkdeep, discuss,magic -series: ---- - -There could be anything in a box. - -It could be a boat! 
- -Always take the box. diff --git a/pages/posts/best_dummy.md b/pages/posts/best_dummy.md deleted file mode 100644 index 56fdec3..0000000 --- a/pages/posts/best_dummy.md +++ /dev/null @@ -1,14 +0,0 @@ ---- -title: Best dummy post evar -tags: - - thinkdeep - - discuss - - magic -series: ---- - -There could be anything in a box. - -It could be a boat! - -Always take the box. diff --git a/pages/posts/dumb.md b/pages/posts/dumb.md new file mode 100644 index 0000000..528ef50 --- /dev/null +++ b/pages/posts/dumb.md @@ -0,0 +1,6 @@ +--- +dev-id: 252963 +title: this is not a post +--- + +These are not the contents of this post. \ No newline at end of file diff --git a/pages/posts/dummy.md b/pages/posts/dummy.md deleted file mode 100644 index 4dbd1a1..0000000 --- a/pages/posts/dummy.md +++ /dev/null @@ -1,4 +0,0 @@ ---- -title: 'dummy post' -tags: anonymous ---- diff --git a/pages/posts/yet_another_dummy.md b/pages/posts/yet_another_dummy.md deleted file mode 100644 index da99804..0000000 --- a/pages/posts/yet_another_dummy.md +++ /dev/null @@ -1,11 +0,0 @@ ---- -title: "another dummy post" -tags: thinkdeep, discuss,magic -series: ---- - -There could be anything in a box. - -It could be a boat! - -Always take the box. 
diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 1734429..2ea6df9 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -1,5 +1,5 @@ -from syndicate.utils import action_log_group, action_log, action_error, get_canonical_url, yaml_sequence, commit_silo_id -import frontmatter as frontmatter +from syndicate.utils import action_log_group, action_log, action_warn, action_error, get_canonical_url, yaml_sequence, commit_silo_id +import frontmatter import requests @action_log_group("dev") @@ -8,12 +8,17 @@ def do_the_thing(posts, api_key): action_log("You want to syndicate these posts:") action_log(posts) + success = True for post in posts['added']: results = _draft(post, api_key) - action_log("Draft success!") - action_log(results) - - return True + if results: + action_log("Draft success!") + action_log(results) + else: + action_warn("Draft failure D:") + success = False + + return success ### privates ### diff --git a/syndicate/utils.py b/syndicate/utils.py index 8c9b69a..9149158 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -1,3 +1,4 @@ +import frontmatter import functools import github3 import os @@ -82,8 +83,5 @@ def commit_silo_id(post, post_id, silo=None): fronted_post[f'{silo}-id'] = post_id action_log(f"Updating frontmatter with ID for {silo}") - pushed_change = post.update( - f'syndicate({silo}): adding post ID to frontmatter', - frontmatter.dumps(fronted_post) - ) + pushed_change = post.update(f'syndicate({silo}): adding post id to frontmatter', frontmatter.dumps(fronted_post).encode('utf-8')) action_log(pushed_change) From b153f51762ffa2d08c64c04c35e4a8cd1e816608 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 16:51:34 +0000 Subject: [PATCH 032/105] Revert "oneliner for debugging" This reverts commit b61d52db39d21b00ee987dfd192e97dc1b11497c. 
change silo post id to snake_case --- pages/posts/dumb.md | 4 ++-- syndicate/utils.py | 7 +++++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/pages/posts/dumb.md b/pages/posts/dumb.md index 528ef50..58d166d 100644 --- a/pages/posts/dumb.md +++ b/pages/posts/dumb.md @@ -1,6 +1,6 @@ --- -dev-id: 252963 +dev_id: 252963 title: this is not a post --- -These are not the contents of this post. \ No newline at end of file +These are not the contents of this post. diff --git a/syndicate/utils.py b/syndicate/utils.py index 9149158..e7e0660 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -80,8 +80,11 @@ def commit_silo_id(post, post_id, silo=None): assert silo, "silo not specified" fronted_post = frontmatter.loads(post.decoded.decode('utf-8')) - fronted_post[f'{silo}-id'] = post_id + fronted_post[f'{silo}_id'] = post_id action_log(f"Updating frontmatter with ID for {silo}") - pushed_change = post.update(f'syndicate({silo}): adding post id to frontmatter', frontmatter.dumps(fronted_post).encode('utf-8')) + pushed_change = post.update( + f'syndicate({silo}): adding post ID to frontmatter', + frontmatter.dumps(fronted_post).encode('utf-8') + ) action_log(pushed_change) From 68b530b915e37838153e1a171a28d1ebc61d16b4 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 17:01:56 +0000 Subject: [PATCH 033/105] Allow configuration of post directory --- .github/workflows/main.yml | 4 ++++ syndicate/utils.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 9ed81df..bcea9cd 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -22,7 +22,11 @@ jobs: medium mars env: + # This is provided to all actions by Github. GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + # Tell me the path (relative to the project root) where your posts live, so I can find them. + # Defaults to 'posts'. 
+ SYNDICATE_POST_DIR: 'pages/posts' # Set necessary API keys as secrets of your repo and specify them here in this format: # # _API_KEY diff --git a/syndicate/utils.py b/syndicate/utils.py index e7e0660..9c4d626 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -46,7 +46,7 @@ def file_contents(filename): assert os.getenv("GITHUB_SHA"), "GITHUB_SHA not available" return repo().file_contents(filename, os.getenv("GITHUB_SHA")) -def get_posts(post_dir='pages/posts'): +def get_posts(post_dir=os.getenv('SYNDICATE_POST_DIR', 'posts')): files = get_commit_payload() assert files, "could not fetch commit payload" From efec1725bc8d72b581399dddb6d72841897710d2 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 17:45:41 +0000 Subject: [PATCH 034/105] Expect API keys in SCREAMING_SNAKE_CASE --- .github/workflows/main.yml | 17 ++++++++++------- syndicate/__init__.py | 6 +++--- 2 files changed, 13 insertions(+), 10 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index bcea9cd..5daeba8 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -17,25 +17,28 @@ jobs: with: # Using a YAML multiline string as a workaround for a list argument. # @see https://github.community/t5/GitHub-Actions/Can-action-inputs-be-arrays/td-p/33776 + # Names are snake_case, case-insensitive. silos: | - dev - medium - mars + DEV + Medium + Planet_Mars env: - # This is provided to all actions by Github. + # This is provided to all actions by Github, and needed to access the posts + # in your repository. + # @see https://help.github.com/en/actions/automating-your-workflow-with-github-actions/authenticating-with-the-github_token GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Tell me the path (relative to the project root) where your posts live, so I can find them. # Defaults to 'posts'. 
SYNDICATE_POST_DIR: 'pages/posts' # Set necessary API keys as secrets of your repo and specify them here in this format: # - # _API_KEY + # _API_KEY # # e.g. # - # medium_API_KEY + # MEDIUM_API_KEY # # so that the action can find them easily when needed. - dev_API_KEY: ${{ secrets.dev_API_KEY }} + DEV_API_KEY: ${{ secrets.DEV_API_KEY }} - name: Get the output time run: echo "The time was ${{ steps.syndicate.outputs.time }}" diff --git a/syndicate/__init__.py b/syndicate/__init__.py index d7abdf8..7b59203 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -15,7 +15,7 @@ def elsewhere(silos): recognized_silos = {silo:spec for (silo,spec) in specs.items() if spec} action_log(f"I know how to publish to these places: {list(recognized_silos.keys())}") - available_keys = {silo:bool(_get_api_key(silo)) for silo in recognized_silos.keys()} + available_keys = {silo:_has_api_key(silo) for silo in recognized_silos.keys()} if not all(available_keys.values()): action_log(f"But I don't have API keys for these places: {[silo for (silo, available) in available_keys.items() if not available]}") @@ -32,10 +32,10 @@ def elsewhere(silos): action_output("time", datetime.now()) ### privates ### -_API_KEY = lambda s: f"{s}_API_KEY" +_API_KEY = lambda s: f"{s.upper()}_API_KEY" def _locate(silo): - return importlib.util.find_spec(f'syndicate.silos.{silo}') + return importlib.util.find_spec(f'syndicate.silos.{silo.lower()}') def _syndicate(silo_spec, api_key, posts): if silo_spec and api_key: From 51bc750029e6d646f36590a926b7971a57a5b033 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 17:46:38 +0000 Subject: [PATCH 035/105] Rename silo entrypoint --- syndicate/__init__.py | 2 +- syndicate/silos/dev.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/syndicate/__init__.py b/syndicate/__init__.py index 7b59203..0b51cd3 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -39,7 +39,7 @@ def _locate(silo): def 
_syndicate(silo_spec, api_key, posts): if silo_spec and api_key: - return importlib.import_module(silo_spec.name).do_the_thing(posts, api_key) + return importlib.import_module(silo_spec.name).syndicate(posts, api_key) else: return None diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 2ea6df9..d13d1ee 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -3,7 +3,7 @@ import requests @action_log_group("dev") -def do_the_thing(posts, api_key): +def syndicate(posts, api_key): action_log("Hello? Yes, this is DEV.") action_log("You want to syndicate these posts:") action_log(posts) From 918923d32f99fc2cb9839c6376911fa0c1ef2cb0 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 18:20:38 +0000 Subject: [PATCH 036/105] Add error message when post has no frontmatter --- syndicate/silos/dev.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index d13d1ee..e9edfaf 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -86,7 +86,7 @@ def _update(): def _payload_for(post): raw_contents = post.decoded.decode('utf-8') - assert frontmatter.checks(raw_contents) + assert frontmatter.checks(raw_contents), "article is missing frontmatter" front, body = frontmatter.parse(raw_contents) assert front.get('title'), "article is missing a title" From 82a56c94fb108b80e65c5d672397a02f09726bf9 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 18:22:06 +0000 Subject: [PATCH 037/105] Fix draft specs --- tests/mocks.py | 5 ++++- tests/test_dev.py | 11 ++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/tests/mocks.py b/tests/mocks.py index 148bdf9..84260ab 100644 --- a/tests/mocks.py +++ b/tests/mocks.py @@ -10,7 +10,10 @@ def __init__(self): tags: beauty, fake --- What is a body? 
- """) + """).strip() self.front, _ = frontmatter.parse(self.raw_contents) self.decoded = self.raw_contents.encode('utf-8') self.html_url = 'https://silo.com/a-beautiful-mock' + + def update(self, *args, **kwargs): + pass diff --git a/tests/test_dev.py b/tests/test_dev.py index c2ebc92..2e8c8f0 100644 --- a/tests/test_dev.py +++ b/tests/test_dev.py @@ -42,15 +42,12 @@ def test_draft_error_when_post_missing(): with pytest.raises(AssertionError): dev._draft(None) -def test_draft_error_when_request_fails(requests_mock, monkeypatch): +def test_draft_returns_nothing_when_request_fails(requests_mock, monkeypatch): monkeypatch.setenv('GITHUB_REPOSITORY', 'herp/derp') - requests_mock.post("https://dev.to/api/articles", status_code=422) - post = MockPost() - with pytest.raises(requests.exceptions.HTTPError): - dev._draft(post, api_key='fake_api_key') + requests_mock.post("https://dev.to/api/articles", status_code=422, json={"error": "you made a fake request"}) + assert not dev._draft(MockPost(), api_key='fake_api_key') def test_draft_returns_something_on_success(requests_mock, monkeypatch): - fake_results = { 'type_of': 'article', 'id': 42 } - requests_mock.post("https://dev.to/api/articles", status_code=200, json=fake_results) monkeypatch.setenv('GITHUB_REPOSITORY', 'herp/derp') + requests_mock.post("https://dev.to/api/articles", status_code=201, json={ 'type_of': 'article', 'id': 42 }) assert dev._draft(MockPost(), api_key='fake_api_key') From e6c62494bbb050a3ed38aa932b8cdece316cf6b2 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 18:28:44 +0000 Subject: [PATCH 038/105] Test DEV commit-on-draft --- tests/mocks.py | 5 ++++- tests/test_dev.py | 7 +++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/tests/mocks.py b/tests/mocks.py index 84260ab..43c3287 100644 --- a/tests/mocks.py +++ b/tests/mocks.py @@ -1,6 +1,8 @@ import frontmatter import textwrap +# A light-weight, as-needed mock of github3.repos.contents.Contents +# @see 
https://github3.readthedocs.io/en/master/api-reference/repos.html#github3.repos.contents.Contents class MockPost: def __init__(self): self.raw_contents = textwrap.dedent( @@ -14,6 +16,7 @@ def __init__(self): self.front, _ = frontmatter.parse(self.raw_contents) self.decoded = self.raw_contents.encode('utf-8') self.html_url = 'https://silo.com/a-beautiful-mock' + self.updated = False def update(self, *args, **kwargs): - pass + self.updated = True diff --git a/tests/test_dev.py b/tests/test_dev.py index 2e8c8f0..b9f6d19 100644 --- a/tests/test_dev.py +++ b/tests/test_dev.py @@ -51,3 +51,10 @@ def test_draft_returns_something_on_success(requests_mock, monkeypatch): monkeypatch.setenv('GITHUB_REPOSITORY', 'herp/derp') requests_mock.post("https://dev.to/api/articles", status_code=201, json={ 'type_of': 'article', 'id': 42 }) assert dev._draft(MockPost(), api_key='fake_api_key') + +def test_draft_updates_post_on_success(requests_mock, monkeypatch): + monkeypatch.setenv('GITHUB_REPOSITORY', 'herp/derp') + requests_mock.post("https://dev.to/api/articles", status_code=201, json={ 'type_of': 'article', 'id': 42 }) + mock = MockPost() + dev._draft(mock, api_key='fake_api_key') + assert mock.updated From 13f71d1119ccab5b75ff67768f4ae42deca9fb35 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 18:46:00 +0000 Subject: [PATCH 039/105] Implement DEV post update --- syndicate/silos/dev.py | 57 +++++++++++++++++++++++++++++++----------- 1 file changed, 43 insertions(+), 14 deletions(-) diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index e9edfaf..08a6ecc 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -18,6 +18,15 @@ def syndicate(posts, api_key): action_warn("Draft failure D:") success = False + for post in posts['modified']: + results = _update(post, api_key) + if results: + action_log("Update success!") + action_log(results) + else: + action_warn("Update failure D:") + success = False + return success ### privates ### @@ -27,9 
+36,7 @@ def syndicate(posts, api_key): def _fetch(post_id=None, api_key=None): assert api_key, "missing API key" - headers = { - 'api-key': api_key - } + headers = {'api-key': api_key} if post_id: # Fetch data for given post ID ## NOTE Currently, there's no way to fetch data for a specific post. @@ -63,34 +70,56 @@ def _draft(post, api_key=None): action_log("Drafting a post with this payload:") action_log(payload) endpoint = "https://dev.to/api/articles" - headers = { - 'api-key': api_key - } + headers = {'api-key': api_key} response = requests.post(endpoint, headers=headers, json=payload) if response.status_code != requests.codes.created: action_error("Failed to create draft!") action_error(response.json()) return None - - results = response.json() - assert results['id'] - commit_silo_id(post, results['id'], silo='dev') - return results + else: + results = response.json() + assert results['id'] + commit_silo_id(post, results['id'], silo='dev') + return results def _publish(): pass -def _update(): - pass +def _update(post, api_key=None): + assert api_key, "missing API key" + assert post, "missing post" + + endpoint = f'https://dev.to/api/articles/{_id_for(post)}' + headers = {'api-key': api_key} + payload = {'article': { 'body_markdown': post.decoded.decode('utf-8') } } + response = requests.put(endpoint, headers=headers, json=payload) + if response.status_code != requests.codes.ok: + action_error("Failed to update post!") + action_error(response.json()) + return None + else: + return response.json() + +def _id_for(post): + assert post, "missing post" + return _front_of(post)['dev_id'] + +def _front_of(post): + assert post, "missing post" + raw_contents = post.decoded.decode('utf-8') + assert frontmatter.checks(raw_contents), "post is missing frontmatter" + front, _ = frontmatter.parse(raw_contents) + return front def _payload_for(post): raw_contents = post.decoded.decode('utf-8') - assert frontmatter.checks(raw_contents), "article is missing frontmatter" + assert 
frontmatter.checks(raw_contents), "post is missing frontmatter" front, body = frontmatter.parse(raw_contents) assert front.get('title'), "article is missing a title" + # TODO test if can be accomplished by just sending raw_contents as body return { 'article': { 'title': front['title'], From 342c1e011b735ac5eb8c95c88aea26c4293460cb Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 19:09:36 +0000 Subject: [PATCH 040/105] Guard against missing silo post ID --- syndicate/silos/dev.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 08a6ecc..5a14436 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -103,7 +103,9 @@ def _update(post, api_key=None): def _id_for(post): assert post, "missing post" - return _front_of(post)['dev_id'] + id = _front_of(post).get('dev_id') + assert id, "missing post id for DEV" + return id def _front_of(post): assert post, "missing post" From 3638a4580f736b7d8899efef724a81d09ea21861 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 19:10:21 +0000 Subject: [PATCH 041/105] Cleanup DEV tests --- tests/test_dev.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/tests/test_dev.py b/tests/test_dev.py index b9f6d19..fbb123b 100644 --- a/tests/test_dev.py +++ b/tests/test_dev.py @@ -36,7 +36,7 @@ def fake_results(req, con): def test_draft_error_when_api_key_missing(): with pytest.raises(AssertionError): - dev._draft('asdf') + dev._draft(MockPost()) def test_draft_error_when_post_missing(): with pytest.raises(AssertionError): @@ -44,17 +44,26 @@ def test_draft_error_when_post_missing(): def test_draft_returns_nothing_when_request_fails(requests_mock, monkeypatch): monkeypatch.setenv('GITHUB_REPOSITORY', 'herp/derp') - requests_mock.post("https://dev.to/api/articles", status_code=422, json={"error": "you made a fake request"}) + requests_mock.post( + "https://dev.to/api/articles", + 
status_code=requests.codes.unprocessable_entity, + json={"error": "you made a unintelligble request"}) assert not dev._draft(MockPost(), api_key='fake_api_key') def test_draft_returns_something_on_success(requests_mock, monkeypatch): monkeypatch.setenv('GITHUB_REPOSITORY', 'herp/derp') - requests_mock.post("https://dev.to/api/articles", status_code=201, json={ 'type_of': 'article', 'id': 42 }) + requests_mock.post( + "https://dev.to/api/articles", + status_code=requests.codes.created, + json={ 'type_of': 'article', 'id': 42 }) assert dev._draft(MockPost(), api_key='fake_api_key') def test_draft_updates_post_on_success(requests_mock, monkeypatch): monkeypatch.setenv('GITHUB_REPOSITORY', 'herp/derp') - requests_mock.post("https://dev.to/api/articles", status_code=201, json={ 'type_of': 'article', 'id': 42 }) + requests_mock.post( + "https://dev.to/api/articles", + status_code=requests.codes.created, + json={ 'type_of': 'article', 'id': 42 }) mock = MockPost() dev._draft(mock, api_key='fake_api_key') assert mock.updated From 7ffdaabf66385c62005eb148ae4f25adf22a9b7c Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 19:10:38 +0000 Subject: [PATCH 042/105] Add tests for DEV updates --- tests/mocks.py | 1 + tests/test_dev.py | 27 +++++++++++++++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/tests/mocks.py b/tests/mocks.py index 43c3287..7f3f302 100644 --- a/tests/mocks.py +++ b/tests/mocks.py @@ -8,6 +8,7 @@ def __init__(self): self.raw_contents = textwrap.dedent( """ --- + dev_id: 42 title: A beautiful mock tags: beauty, fake --- diff --git a/tests/test_dev.py b/tests/test_dev.py index fbb123b..0f56645 100644 --- a/tests/test_dev.py +++ b/tests/test_dev.py @@ -67,3 +67,30 @@ def test_draft_updates_post_on_success(requests_mock, monkeypatch): mock = MockPost() dev._draft(mock, api_key='fake_api_key') assert mock.updated + +def test_update_error_when_api_key_missing(): + with pytest.raises(AssertionError): + dev._update(MockPost()) + +def 
test_update_error_when_post_missing(): + with pytest.raises(AssertionError): + dev._update(None) + +def test_update_returns_nothing_when_request_fails(requests_mock, monkeypatch): + monkeypatch.setenv('GITHUB_REPOSITORY', 'herp/derp') + mock = MockPost() + requests_mock.put( + f"https://dev.to/api/articles/{mock.front['dev_id']}", + status_code=requests.codes.unprocessable_entity, + json={"error": "you made an unintelligble request"}) + assert not dev._update(mock, api_key='fake_api_key') + +def test_update_returns_something_on_success(requests_mock, monkeypatch): + monkeypatch.setenv('GITHUB_REPOSITORY', 'herp/derp') + mock = MockPost() + mock_id= mock.front['dev_id'] + requests_mock.put( + f"https://dev.to/api/articles/{mock_id}", + status_code=requests.codes.ok, + json={'type_of': 'article', 'id': mock_id}) + assert dev._update(mock, api_key='fake_api_key') From c4c69ba31769bd084f6441cb3af54b4ce8f68452 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 19:15:32 +0000 Subject: [PATCH 043/105] Return early instead of erroring if no posts --- syndicate/__init__.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/syndicate/__init__.py b/syndicate/__init__.py index 0b51cd3..d5a0a2b 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -7,7 +7,10 @@ def elsewhere(silos): posts = get_posts() - assert posts, "no posts to update" + if not posts: + action_log("No posts added or updated, nothing to see here.") + action_output("time", datetime.now()) + return action_log(f"You want to publish to these places: {silos}") From abba91e7bf14a90774946c1bb0d9fe213c5a3c3a Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 19:43:25 +0000 Subject: [PATCH 044/105] Define alternate 'waterfall' syndication workflow --- .github/workflows/alt.yml | 42 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 .github/workflows/alt.yml diff --git a/.github/workflows/alt.yml 
b/.github/workflows/alt.yml new file mode 100644 index 0000000..2095d94 --- /dev/null +++ b/.github/workflows/alt.yml @@ -0,0 +1,42 @@ +name: "An alternate way to syndicate" +on: + push: + branches: + - master + - develop + # paths: + # - 'pages/**/*.mdx?' + +jobs: + syndicate: + runs-on: ubuntu-latest + name: Syndicate posts + env: + # This is provided to all actions by Github, and needed to access the posts + # in your repository. + # @see https://help.github.com/en/actions/automating-your-workflow-with-github-actions/authenticating-with-the-github_token + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + # Tell me the path (relative to the project root) where your posts live, so I can find them. + # Defaults to 'posts'. + SYNDICATE_POST_DIR: 'pages/posts' + steps: + - name: Push to DEV.to + id: DEV + uses: dabrady/syndicate@develop + with: + silos: DEV + env: + DEV_API_KEY: ${{ secrets.DEV_API_KEY }} + + - name: Push to Medium + uses: dabrady/syndicate@develop + id: Medium + with: + silos: Medium + + - name: Push to Mars + id: Planet_Mars + uses: dabrady/syndicate@develop + with: + silos: Planet_Mars From 87629b57cefa87b942583dc73e331b457c02abf5 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 20:24:45 +0000 Subject: [PATCH 045/105] Test getting input from environment variable result compilation guard against no results no need to explicitly pass inputs; using env --- .github/workflows/alt.yml | 5 +++++ action.yml | 2 -- entrypoint.py | 21 ++++++++++++++++++++- syndicate/__init__.py | 7 +++---- syndicate/silos/dev.py | 34 ++++++++++++++++++++-------------- syndicate/utils.py | 3 +++ 6 files changed, 51 insertions(+), 21 deletions(-) diff --git a/.github/workflows/alt.yml b/.github/workflows/alt.yml index 2095d94..d3594ec 100644 --- a/.github/workflows/alt.yml +++ b/.github/workflows/alt.yml @@ -40,3 +40,8 @@ jobs: uses: dabrady/syndicate@develop with: silos: Planet_Mars + + - name: Report results + id: report + run: + 'echo The results are in! 
"\n$SYNDICATED_POSTS"' diff --git a/action.yml b/action.yml index 9b16cb5..9fde69a 100644 --- a/action.yml +++ b/action.yml @@ -12,5 +12,3 @@ outputs: runs: using: 'docker' image: 'Dockerfile' - args: - - ${{ inputs.silos }} diff --git a/entrypoint.py b/entrypoint.py index a0d6c8a..55da02c 100755 --- a/entrypoint.py +++ b/entrypoint.py @@ -1,9 +1,28 @@ #!/usr/bin/env python3 +import json import os import sys +# NOTE This is where our action module lives in the container. +# TODO Is there a way to manipulate the path from Dockerfile? ACTION_SOURCE='/action' sys.path.insert(0, os.path.abspath(ACTION_SOURCE)) import syndicate -syndicate.elsewhere(sys.argv[1].splitlines()) +from syndicate.utils import action_setenv + +action_inputs = { + 'silos': os.getenv('INPUT_SILOS').splitlines() +} + +# Syndicate +results = syndicate.elsewhere(action_inputs['silos']) +if results: + # Compile results for future steps. + previous_results = os.getenv('SYNDICATED_POSTS') + if previous_results: + syndicated_posts = json.loads(previous_results) + syndicated_posts.update(results) + else: + syndicated_posts = results + action_setenv('SYNDICATED_POSTS', json.dumps(syndicated_posts)) diff --git a/syndicate/__init__.py b/syndicate/__init__.py index d5a0a2b..e2b129d 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -27,12 +27,11 @@ def elsewhere(silos): action_log("I'll do what I can.") results = {silo:_syndicate(spec, _get_api_key(silo), posts) for (silo,spec) in specs.items() if _has_api_key(silo)} - - action_log(results) + action_output("time", datetime.now()) + return results else: action_warn("Sorry, can't help you.") - - action_output("time", datetime.now()) + action_output("time", datetime.now()) ### privates ### _API_KEY = lambda s: f"{s.upper()}_API_KEY" diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 5a14436..3a4634e 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -8,31 +8,33 @@ def syndicate(posts, api_key): action_log("You 
want to syndicate these posts:") action_log(posts) - success = True + results = { + 'added': [], + 'modified': [] + } for post in posts['added']: - results = _draft(post, api_key) + post_id = _draft(post, api_key) if results: action_log("Draft success!") - action_log(results) + res['added'].append(post_id) else: - action_warn("Draft failure D:") - success = False + action_warn(f"Draft failure for '{post.name}'") for post in posts['modified']: - results = _update(post, api_key) + post_id = _update(post, api_key) if results: action_log("Update success!") - action_log(results) + res['modified'].append(post_id) else: - action_warn("Update failure D:") - success = False + action_warn(f"Update failure for '{post.name}'") - return success + return results ### privates ### ## This is a simple semantic wrapper around the DEV API, currently in beta. +# NOTE Not currently used def _fetch(post_id=None, api_key=None): assert api_key, "missing API key" @@ -79,9 +81,10 @@ def _draft(post, api_key=None): return None else: results = response.json() - assert results['id'] - commit_silo_id(post, results['id'], silo='dev') - return results + post_id = results['id'] + assert post_id + commit_silo_id(post, post_id, silo='dev') + return post_id def _publish(): pass @@ -99,7 +102,10 @@ def _update(post, api_key=None): action_error(response.json()) return None else: - return response.json() + results = response.json() + post_id = results['id'] + assert post_id + return post_id def _id_for(post): assert post, "missing post" diff --git a/syndicate/utils.py b/syndicate/utils.py index 9c4d626..761e410 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -29,6 +29,9 @@ def _wrapper(*args, **kwargs): return _wrapper return _decorator +def action_setenv(key, value): + print(f"::set-env name={key}::{value}") + # Memoize authentication @functools.lru_cache(maxsize=1) def repo(): From c1b811f5f15542bfbac0b6ab18fd28a00b540c52 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 
20:58:06 +0000 Subject: [PATCH 046/105] Capture output of individual step --- .github/workflows/alt.yml | 3 +++ action.yml | 2 ++ entrypoint.py | 3 ++- 3 files changed, 7 insertions(+), 1 deletion(-) diff --git a/.github/workflows/alt.yml b/.github/workflows/alt.yml index d3594ec..648880b 100644 --- a/.github/workflows/alt.yml +++ b/.github/workflows/alt.yml @@ -29,6 +29,9 @@ jobs: env: DEV_API_KEY: ${{ secrets.DEV_API_KEY }} + - name: Report DEV.to results + run: 'echo "${{ toJSON(steps.DEV.outputs) }}"' + - name: Push to Medium uses: dabrady/syndicate@develop id: Medium diff --git a/action.yml b/action.yml index 9fde69a..c3137df 100644 --- a/action.yml +++ b/action.yml @@ -9,6 +9,8 @@ outputs: # TODO Change this to a map of publish times keyed by silo time: # id of output description: 'The time this action finished' + syndicated_posts: + description: 'A JSON object reporting the results of syndicating to the specified silos.' runs: using: 'docker' image: 'Dockerfile' diff --git a/entrypoint.py b/entrypoint.py index 55da02c..7a8fe3b 100755 --- a/entrypoint.py +++ b/entrypoint.py @@ -9,7 +9,7 @@ sys.path.insert(0, os.path.abspath(ACTION_SOURCE)) import syndicate -from syndicate.utils import action_setenv +from syndicate.utils import action_output, action_setenv action_inputs = { 'silos': os.getenv('INPUT_SILOS').splitlines() @@ -17,6 +17,7 @@ # Syndicate results = syndicate.elsewhere(action_inputs['silos']) +action_output('syndicated_posts', results) if results: # Compile results for future steps. 
previous_results = os.getenv('SYNDICATED_POSTS') From 45aacbc460910f6e5de9a130b3ef2605effbd79e Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 21:10:29 +0000 Subject: [PATCH 047/105] Add 'commit' flag to control post update behavior --- action.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/action.yml b/action.yml index c3137df..57c6b66 100644 --- a/action.yml +++ b/action.yml @@ -5,6 +5,10 @@ inputs: description: 'A list of names indicating the platforms to publish your content to.' required: true default: '' + commit: + description: 'Set this to true to update the frontmatter of your posts with their syndicate IDs' + required: false + default: false outputs: # TODO Change this to a map of publish times keyed by silo time: # id of output From 5789c29b0ef6a822a69d4de0f95a7b64e28137ec Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 21:10:54 +0000 Subject: [PATCH 048/105] Test commit flag --- .github/workflows/alt.yml | 8 +++++++- entrypoint.py | 3 ++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/.github/workflows/alt.yml b/.github/workflows/alt.yml index 648880b..dc056d1 100644 --- a/.github/workflows/alt.yml +++ b/.github/workflows/alt.yml @@ -26,6 +26,7 @@ jobs: uses: dabrady/syndicate@develop with: silos: DEV + commit: true env: DEV_API_KEY: ${{ secrets.DEV_API_KEY }} @@ -47,4 +48,9 @@ jobs: - name: Report results id: report run: - 'echo The results are in! "\n$SYNDICATED_POSTS"' + 'echo The results are in! 
"${{ toJSON(env.SYNDICATED_POSTS) }}"' + + # - name: Update posts with syndicate IDs + # uses: dabrady/syndicate@develop + # with: + # commit: true diff --git a/entrypoint.py b/entrypoint.py index 7a8fe3b..656de3d 100755 --- a/entrypoint.py +++ b/entrypoint.py @@ -12,7 +12,8 @@ from syndicate.utils import action_output, action_setenv action_inputs = { - 'silos': os.getenv('INPUT_SILOS').splitlines() + 'silos': os.getenv('INPUT_SILOS').splitlines(), + 'commit': json.loads(os.getenv('INPUT_COMMIT')) } # Syndicate From 1b152388b29eab529782e223ea58efa281e1bae2 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 21:20:35 +0000 Subject: [PATCH 049/105] Pass all action inputs to 'elsewhere' --- entrypoint.py | 2 +- syndicate/__init__.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/entrypoint.py b/entrypoint.py index 656de3d..5a4468d 100755 --- a/entrypoint.py +++ b/entrypoint.py @@ -17,7 +17,7 @@ } # Syndicate -results = syndicate.elsewhere(action_inputs['silos']) +results = syndicate.elsewhere(**action_inputs) action_output('syndicated_posts', results) if results: # Compile results for future steps. 
diff --git a/syndicate/__init__.py b/syndicate/__init__.py index e2b129d..9f270d2 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -5,7 +5,7 @@ import os import importlib.util -def elsewhere(silos): +def elsewhere(silos=None, commit=False): posts = get_posts() if not posts: action_log("No posts added or updated, nothing to see here.") From b449261cc162e56a7cc877e840d4abc664c90990 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 21:25:11 +0000 Subject: [PATCH 050/105] Change 'commit' input to 'commit_on_create' dummy edit to test all ze changes fix refactor artifact dummy post update --- action.yml | 4 ++-- entrypoint.py | 2 +- pages/posts/dumb.md | 3 +-- syndicate/__init__.py | 14 ++++++++++---- syndicate/silos/dev.py | 8 ++++---- 5 files changed, 18 insertions(+), 13 deletions(-) diff --git a/action.yml b/action.yml index 57c6b66..1d305c7 100644 --- a/action.yml +++ b/action.yml @@ -5,8 +5,8 @@ inputs: description: 'A list of names indicating the platforms to publish your content to.' required: true default: '' - commit: - description: 'Set this to true to update the frontmatter of your posts with their syndicate IDs' + commit_on_create: + description: 'Set this to true to update the frontmatter of new posts with their syndicate IDs' required: false default: false outputs: diff --git a/entrypoint.py b/entrypoint.py index 5a4468d..1e9e221 100755 --- a/entrypoint.py +++ b/entrypoint.py @@ -13,7 +13,7 @@ action_inputs = { 'silos': os.getenv('INPUT_SILOS').splitlines(), - 'commit': json.loads(os.getenv('INPUT_COMMIT')) + 'commit_on_create': json.loads(os.getenv('INPUT_COMMIT_ON_CREATE')) } # Syndicate diff --git a/pages/posts/dumb.md b/pages/posts/dumb.md index 58d166d..87109f5 100644 --- a/pages/posts/dumb.md +++ b/pages/posts/dumb.md @@ -2,5 +2,4 @@ dev_id: 252963 title: this is not a post --- - -These are not the contents of this post. +These are not the contents you're looking for. 
diff --git a/syndicate/__init__.py b/syndicate/__init__.py index 9f270d2..7b10747 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -5,7 +5,7 @@ import os import importlib.util -def elsewhere(silos=None, commit=False): +def elsewhere(silos=[], commit_on_create=False): posts = get_posts() if not posts: action_log("No posts added or updated, nothing to see here.") @@ -20,13 +20,19 @@ def elsewhere(silos=None, commit=False): available_keys = {silo:_has_api_key(silo) for silo in recognized_silos.keys()} - if not all(available_keys.values()): - action_log(f"But I don't have API keys for these places: {[silo for (silo, available) in available_keys.items() if not available]}") - if any(available_keys.values()): action_log("I'll do what I can.") + if not all(available_keys.values()): + action_log(f"But I don't have API keys for these places: {[silo for (silo, available) in available_keys.items() if not available]}") results = {silo:_syndicate(spec, _get_api_key(silo), posts) for (silo,spec) in specs.items() if _has_api_key(silo)} + + if commit_on_create: + action_log("Sorry, commit not yet supported") + pass + else: + action_log("You opted not to update your repo with the syndicate IDs of newly added posts") + action_output("time", datetime.now()) return results else: diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 3a4634e..17d6ed5 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -14,17 +14,17 @@ def syndicate(posts, api_key): } for post in posts['added']: post_id = _draft(post, api_key) - if results: + if post_id: action_log("Draft success!") - res['added'].append(post_id) + results['added'].append(post_id) else: action_warn(f"Draft failure for '{post.name}'") for post in posts['modified']: post_id = _update(post, api_key) - if results: + if post_id: action_log("Update success!") - res['modified'].append(post_id) + results['modified'].append(post_id) else: action_warn(f"Update failure for '{post.name}'") From 
75b38369a58e393f07a80056e33a964e3c2737e5 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 21:31:25 +0000 Subject: [PATCH 051/105] Disable main flow on develop dummy edit, action is stale --- .github/workflows/main.yml | 3 ++- pages/posts/dumb.md | 1 + syndicate/silos/dev.py | 4 ++-- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 5daeba8..170038d 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -2,7 +2,8 @@ on: push: branches: - master - - develop + ## Working on alternate flow for now + # - develop paths: - 'pages/**/*.mdx?' diff --git a/pages/posts/dumb.md b/pages/posts/dumb.md index 87109f5..d9ed893 100644 --- a/pages/posts/dumb.md +++ b/pages/posts/dumb.md @@ -2,4 +2,5 @@ dev_id: 252963 title: this is not a post --- + These are not the contents you're looking for. diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 17d6ed5..b55da5f 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -15,7 +15,7 @@ def syndicate(posts, api_key): for post in posts['added']: post_id = _draft(post, api_key) if post_id: - action_log("Draft success!") + action_log("Drafted successfully!") results['added'].append(post_id) else: action_warn(f"Draft failure for '{post.name}'") @@ -23,7 +23,7 @@ def syndicate(posts, api_key): for post in posts['modified']: post_id = _update(post, api_key) if post_id: - action_log("Update success!") + action_log("Updated successfully!") results['modified'].append(post_id) else: action_warn(f"Update failure for '{post.name}'") From faa89fda2bec9fc2e33302dba2fae4d0bcc7ae2f Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 21:38:35 +0000 Subject: [PATCH 052/105] Pull up 'commit on create' into entrypoint --- entrypoint.py | 9 ++++++++- syndicate/__init__.py | 8 +------- syndicate/silos/dev.py | 1 + 3 files changed, 10 insertions(+), 8 deletions(-) diff --git a/entrypoint.py b/entrypoint.py 
index 1e9e221..17def4d 100755 --- a/entrypoint.py +++ b/entrypoint.py @@ -9,7 +9,7 @@ sys.path.insert(0, os.path.abspath(ACTION_SOURCE)) import syndicate -from syndicate.utils import action_output, action_setenv +from syndicate.utils import action_log, action_output, action_setenv action_inputs = { 'silos': os.getenv('INPUT_SILOS').splitlines(), @@ -19,6 +19,13 @@ # Syndicate results = syndicate.elsewhere(**action_inputs) action_output('syndicated_posts', results) + +## TODO commit up here using 'SYNDICATED_POSTS' or results +if action_inputs['commit_on_create']: + action_log("Sorry, commit not yet supported") +else: + action_log("You opted not to update your repo with the syndicate IDs of newly added posts") + if results: # Compile results for future steps. previous_results = os.getenv('SYNDICATED_POSTS') diff --git a/syndicate/__init__.py b/syndicate/__init__.py index 7b10747..45b9805 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -5,7 +5,7 @@ import os import importlib.util -def elsewhere(silos=[], commit_on_create=False): +def elsewhere(silos): posts = get_posts() if not posts: action_log("No posts added or updated, nothing to see here.") @@ -27,12 +27,6 @@ def elsewhere(silos=[], commit_on_create=False): results = {silo:_syndicate(spec, _get_api_key(silo), posts) for (silo,spec) in specs.items() if _has_api_key(silo)} - if commit_on_create: - action_log("Sorry, commit not yet supported") - pass - else: - action_log("You opted not to update your repo with the syndicate IDs of newly added posts") - action_output("time", datetime.now()) return results else: diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index b55da5f..c52a7b5 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -83,6 +83,7 @@ def _draft(post, api_key=None): results = response.json() post_id = results['id'] assert post_id + ## TODO Move this up to `elsewhere` commit_silo_id(post, post_id, silo='dev') return post_id From 
8f0c57066c7c984a7507676623e3a948ef7d141e Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 21:39:14 +0000 Subject: [PATCH 053/105] Add 'commit' step to alternate flow whoops, missed a spot wrong input name debug statement moar debugs wrong input for dev step shortcircuit if no silos recognized shortcircuit if no posts changed in commit --- .github/workflows/alt.yml | 10 +++++----- entrypoint.py | 2 +- syndicate/__init__.py | 5 ++--- syndicate/utils.py | 6 ++++-- 4 files changed, 12 insertions(+), 11 deletions(-) diff --git a/.github/workflows/alt.yml b/.github/workflows/alt.yml index dc056d1..656f40d 100644 --- a/.github/workflows/alt.yml +++ b/.github/workflows/alt.yml @@ -26,7 +26,7 @@ jobs: uses: dabrady/syndicate@develop with: silos: DEV - commit: true + commit_on_create: true env: DEV_API_KEY: ${{ secrets.DEV_API_KEY }} @@ -50,7 +50,7 @@ jobs: run: 'echo The results are in! "${{ toJSON(env.SYNDICATED_POSTS) }}"' - # - name: Update posts with syndicate IDs - # uses: dabrady/syndicate@develop - # with: - # commit: true + - name: Update posts with syndicate IDs + uses: dabrady/syndicate@develop + with: + commit_on_create: true diff --git a/entrypoint.py b/entrypoint.py index 17def4d..511fe1b 100755 --- a/entrypoint.py +++ b/entrypoint.py @@ -17,7 +17,7 @@ } # Syndicate -results = syndicate.elsewhere(**action_inputs) +results = syndicate.elsewhere(action_inputs['silos']) action_output('syndicated_posts', results) ## TODO commit up here using 'SYNDICATED_POSTS' or results diff --git a/syndicate/__init__.py b/syndicate/__init__.py index 45b9805..a31e25a 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -16,11 +16,10 @@ def elsewhere(silos): specs = {silo:_locate(silo) for silo in silos} recognized_silos = {silo:spec for (silo,spec) in specs.items() if spec} - action_log(f"I know how to publish to these places: {list(recognized_silos.keys())}") - available_keys = {silo:_has_api_key(silo) for silo in recognized_silos.keys()} - if 
any(available_keys.values()): + if recognized_silos and any(available_keys.values()): + action_log(f"I know how to publish to these places: {list(recognized_silos.keys())}") action_log("I'll do what I can.") if not all(available_keys.values()): action_log(f"But I don't have API keys for these places: {[silo for (silo, available) in available_keys.items() if not available]}") diff --git a/syndicate/utils.py b/syndicate/utils.py index 761e410..82d5606 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -51,11 +51,13 @@ def file_contents(filename): def get_posts(post_dir=os.getenv('SYNDICATE_POST_DIR', 'posts')): files = get_commit_payload() - assert files, "could not fetch commit payload" + assert files, "commit had no files in its payload" posts = [file for file in files if file['filename'].startswith(post_dir)] - post_contents = {post['status']:file_contents(post['filename']) for post in posts} + if not posts: + return None + post_contents = {post['status']:file_contents(post['filename']) for post in posts} return { 'added': [contents for (status, contents) in post_contents.items() if status == 'added'], 'modified': [contents for (status, contents) in post_contents.items() if status == 'modified'] From a11c0ededa8a2e90dcd8b478dd902c8b7554c31d Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 22:49:06 +0000 Subject: [PATCH 054/105] Don't assert on data I can't control --- syndicate/silos/dev.py | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index c52a7b5..81ed4aa 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -81,11 +81,9 @@ def _draft(post, api_key=None): return None else: results = response.json() - post_id = results['id'] - assert post_id ## TODO Move this up to `elsewhere` commit_silo_id(post, post_id, silo='dev') - return post_id + return results['id'] def _publish(): pass @@ -104,20 +102,15 @@ def _update(post, api_key=None): return 
None else: results = response.json() - post_id = results['id'] - assert post_id - return post_id + return results['id'] def _id_for(post): assert post, "missing post" - id = _front_of(post).get('dev_id') - assert id, "missing post id for DEV" - return id + return _front_of(post).get('dev_id') def _front_of(post): assert post, "missing post" raw_contents = post.decoded.decode('utf-8') - assert frontmatter.checks(raw_contents), "post is missing frontmatter" front, _ = frontmatter.parse(raw_contents) return front From a38205a548ec99228ff8631414d075ac052d04be Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 22:49:21 +0000 Subject: [PATCH 055/105] Stop commiting DEV drafts to repo by default --- syndicate/silos/dev.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 81ed4aa..1bdb245 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -81,8 +81,6 @@ def _draft(post, api_key=None): return None else: results = response.json() - ## TODO Move this up to `elsewhere` - commit_silo_id(post, post_id, silo='dev') return results['id'] def _publish(): From 581d63381a5dccf8398149f15cd091bd863ad52a Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 23:18:18 +0000 Subject: [PATCH 056/105] Compress DEV --- syndicate/silos/dev.py | 58 ++++++++++++------------------------------ 1 file changed, 16 insertions(+), 42 deletions(-) diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 1bdb245..1870725 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -5,30 +5,11 @@ @action_log_group("dev") def syndicate(posts, api_key): action_log("Hello? 
Yes, this is DEV.") - action_log("You want to syndicate these posts:") - action_log(posts) - results = { - 'added': [], - 'modified': [] + return { + 'added': [id for id in (_draft(post, api_key) for post in posts if not _id_for(post)) if id], + 'modified': [id for id in (_update(post, api_key) for post in posts if _id_for(post)) if id] } - for post in posts['added']: - post_id = _draft(post, api_key) - if post_id: - action_log("Drafted successfully!") - results['added'].append(post_id) - else: - action_warn(f"Draft failure for '{post.name}'") - - for post in posts['modified']: - post_id = _update(post, api_key) - if post_id: - action_log("Updated successfully!") - results['modified'].append(post_id) - else: - action_warn(f"Update failure for '{post.name}'") - - return results ### privates ### @@ -69,23 +50,18 @@ def _draft(post, api_key=None): payload = _payload_for(post) - action_log("Drafting a post with this payload:") - action_log(payload) endpoint = "https://dev.to/api/articles" headers = {'api-key': api_key} response = requests.post(endpoint, headers=headers, json=payload) if response.status_code != requests.codes.created: - action_error("Failed to create draft!") + action_error(f"Failed to create draft for '{post.name}'") action_error(response.json()) return None else: results = response.json() return results['id'] -def _publish(): - pass - def _update(post, api_key=None): assert api_key, "missing API key" assert post, "missing post" @@ -95,7 +71,7 @@ def _update(post, api_key=None): payload = {'article': { 'body_markdown': post.decoded.decode('utf-8') } } response = requests.put(endpoint, headers=headers, json=payload) if response.status_code != requests.codes.ok: - action_error("Failed to update post!") + action_error(f"Failed to update post '{post.name}'") action_error(response.json()) return None else: @@ -104,29 +80,27 @@ def _update(post, api_key=None): def _id_for(post): assert post, "missing post" - return _front_of(post).get('dev_id') + return 
_fronted(post).get('dev_id') -def _front_of(post): +def _fronted(post): assert post, "missing post" raw_contents = post.decoded.decode('utf-8') - front, _ = frontmatter.parse(raw_contents) - return front + return frontmatter.loads(raw_contents) def _payload_for(post): - raw_contents = post.decoded.decode('utf-8') - assert frontmatter.checks(raw_contents), "post is missing frontmatter" + assert post, "missing post" - front, body = frontmatter.parse(raw_contents) - assert front.get('title'), "article is missing a title" + fronted_post = _fronted(post) + assert fronted_post.get('title'), "article is missing a title" - # TODO test if can be accomplished by just sending raw_contents as body + # TODO test if can be accomplished by just sending raw contents as body_markdown return { 'article': { - 'title': front['title'], + 'title': fronted_post['title'], 'published': False, - 'tags': yaml_sequence(front.get('tags', [])), - 'series': front.get('series', None), + 'tags': yaml_sequence(fronted_post.get('tags', [])), + 'series': fronted_post.get('series', None), 'canonical_url': get_canonical_url(post), - 'body_markdown': body + 'body_markdown': fronted_post.content } } From 13a06e61b46c95cff27fe0c73bd4c31e7edcbcfa Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 23:23:39 +0000 Subject: [PATCH 057/105] Compress post aggregation --- syndicate/utils.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/syndicate/utils.py b/syndicate/utils.py index 82d5606..b037ba7 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -56,12 +56,9 @@ def get_posts(post_dir=os.getenv('SYNDICATE_POST_DIR', 'posts')): posts = [file for file in files if file['filename'].startswith(post_dir)] if not posts: return None - - post_contents = {post['status']:file_contents(post['filename']) for post in posts} - return { - 'added': [contents for (status, contents) in post_contents.items() if status == 'added'], - 'modified': [contents for (status, contents) in 
post_contents.items() if status == 'modified'] - } + else: + # Don't care about the Git status: it might not be in sync with the silo + return [file_contents(post['filename']) for post in posts] def get_canonical_url(post): assert os.getenv("GITHUB_REPOSITORY"), "GITHUB_REPOSITORY not available" From 960abea99c274a73fcda795c9b752637f5fbc6c7 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 1 Feb 2020 23:30:46 +0000 Subject: [PATCH 058/105] Compress DEV error output --- syndicate/silos/dev.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 1870725..07a0093 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -55,8 +55,7 @@ def _draft(post, api_key=None): response = requests.post(endpoint, headers=headers, json=payload) if response.status_code != requests.codes.created: - action_error(f"Failed to create draft for '{post.name}'") - action_error(response.json()) + action_error(f"Failed to create draft for '{post.name}': {response.json()}") return None else: results = response.json() @@ -71,8 +70,7 @@ def _update(post, api_key=None): payload = {'article': { 'body_markdown': post.decoded.decode('utf-8') } } response = requests.put(endpoint, headers=headers, json=payload) if response.status_code != requests.codes.ok: - action_error(f"Failed to update post '{post.name}'") - action_error(response.json()) + action_error(f"Failed to update post '{post.name}': {response.json()}") return None else: results = response.json() From 47a05f830f5b45004b5aeaf383f1ed45db7fcfed Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sun, 2 Feb 2020 00:29:48 +0000 Subject: [PATCH 059/105] Move post processors into utils --- syndicate/silos/dev.py | 25 ++++++++----------------- syndicate/utils.py | 14 ++++++++++++-- 2 files changed, 20 insertions(+), 19 deletions(-) diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 07a0093..031e5e4 100644 --- a/syndicate/silos/dev.py +++ 
b/syndicate/silos/dev.py @@ -1,14 +1,14 @@ -from syndicate.utils import action_log_group, action_log, action_warn, action_error, get_canonical_url, yaml_sequence, commit_silo_id -import frontmatter +from syndicate.utils import action_log_group, action_log, action_warn, action_error, get_canonical_url, yaml_sequence, fronted, id_for, commit_silo_id import requests -@action_log_group("dev") +SILO = 'dev' + +@action_log_group(SILO) def syndicate(posts, api_key): action_log("Hello? Yes, this is DEV.") - return { - 'added': [id for id in (_draft(post, api_key) for post in posts if not _id_for(post)) if id], - 'modified': [id for id in (_update(post, api_key) for post in posts if _id_for(post)) if id] + 'added': [id for id in (_draft(post, api_key) for post in posts if not id_for(post, SILO)) if id], + 'modified': [id for id in (_update(post, api_key) for post in posts if id_for(post, SILO)) if id] } ### privates ### @@ -65,7 +65,7 @@ def _update(post, api_key=None): assert api_key, "missing API key" assert post, "missing post" - endpoint = f'https://dev.to/api/articles/{_id_for(post)}' + endpoint = f'https://dev.to/api/articles/{id_for(post, SILO)}' headers = {'api-key': api_key} payload = {'article': { 'body_markdown': post.decoded.decode('utf-8') } } response = requests.put(endpoint, headers=headers, json=payload) @@ -76,19 +76,10 @@ def _update(post, api_key=None): results = response.json() return results['id'] -def _id_for(post): - assert post, "missing post" - return _fronted(post).get('dev_id') - -def _fronted(post): - assert post, "missing post" - raw_contents = post.decoded.decode('utf-8') - return frontmatter.loads(raw_contents) - def _payload_for(post): assert post, "missing post" - fronted_post = _fronted(post) + fronted_post = fronted(post) assert fronted_post.get('title'), "article is missing a title" # TODO test if can be accomplished by just sending raw contents as body_markdown diff --git a/syndicate/utils.py b/syndicate/utils.py index b037ba7..fdea471 
100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -76,12 +76,22 @@ def yaml_sequence(sequence): # If I know how to handle it, handle it; otherwise, just give it back return cases.get(type(sequence), JUST_GIVE_IT_BACK)(sequence) -def commit_silo_id(post, post_id, silo=None): +def fronted(post): + assert post, "missing post" + raw_contents = post.decoded.decode('utf-8') + return frontmatter.loads(raw_contents) + +def id_for(post, silo): + assert post, "missing post" + assert silo, "missing silo" + return fronted(post).get(f'{silo}_id') + +def commit_silo_id(post, post_id, silo): assert post, "missing post info" assert post_id, "missing post ID" assert silo, "silo not specified" - fronted_post = frontmatter.loads(post.decoded.decode('utf-8')) + fronted_post = fronted(post) fronted_post[f'{silo}_id'] = post_id action_log(f"Updating frontmatter with ID for {silo}") From 7580f9b9d252ab088ab38999db86461bc7804678 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sun, 2 Feb 2020 00:30:41 +0000 Subject: [PATCH 060/105] Change syndicate ID template --- pages/posts/dumb.md | 2 +- syndicate/utils.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/pages/posts/dumb.md b/pages/posts/dumb.md index d9ed893..2b77d38 100644 --- a/pages/posts/dumb.md +++ b/pages/posts/dumb.md @@ -1,5 +1,5 @@ --- -dev_id: 252963 +dev_syndicate_id: 252963 title: this is not a post --- diff --git a/syndicate/utils.py b/syndicate/utils.py index fdea471..d7b8c7c 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -84,7 +84,7 @@ def fronted(post): def id_for(post, silo): assert post, "missing post" assert silo, "missing silo" - return fronted(post).get(f'{silo}_id') + return fronted(post).get(f'{silo}_syndicate_id') # TODO extract this template def commit_silo_id(post, post_id, silo): assert post, "missing post info" @@ -92,7 +92,7 @@ def commit_silo_id(post, post_id, silo): assert silo, "silo not specified" fronted_post = fronted(post) - fronted_post[f'{silo}_id'] = 
post_id + fronted_post[f'{silo}_syndicate_id'] = post_id action_log(f"Updating frontmatter with ID for {silo}") pushed_change = post.update( From 71c8a62173c71a21988b9952844c190810c42dac Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sun, 2 Feb 2020 01:11:39 +0000 Subject: [PATCH 061/105] 'mark as syndicated' scaffolding --- entrypoint.py | 20 +++++++++++++------- syndicate/utils.py | 4 ++++ 2 files changed, 17 insertions(+), 7 deletions(-) diff --git a/entrypoint.py b/entrypoint.py index 511fe1b..b24dc79 100755 --- a/entrypoint.py +++ b/entrypoint.py @@ -9,7 +9,7 @@ sys.path.insert(0, os.path.abspath(ACTION_SOURCE)) import syndicate -from syndicate.utils import action_log, action_output, action_setenv +from syndicate.utils import action_log, action_output, action_setenv, mark_as_syndicated action_inputs = { 'silos': os.getenv('INPUT_SILOS').splitlines(), @@ -20,12 +20,6 @@ results = syndicate.elsewhere(action_inputs['silos']) action_output('syndicated_posts', results) -## TODO commit up here using 'SYNDICATED_POSTS' or results -if action_inputs['commit_on_create']: - action_log("Sorry, commit not yet supported") -else: - action_log("You opted not to update your repo with the syndicate IDs of newly added posts") - if results: # Compile results for future steps. 
previous_results = os.getenv('SYNDICATED_POSTS') @@ -35,3 +29,15 @@ else: syndicated_posts = results action_setenv('SYNDICATED_POSTS', json.dumps(syndicated_posts)) + +## TODO commit up here using 'SYNDICATED_POSTS' or results +if action_inputs['commit_on_create']: + # NOTE In the special case where no silos were provided, commit all compiled results + if action_inputs['silos']: + action_log("marking most recent results") + mark_as_syndicated(results) + else: + action_log("marking all results") + mark_as_syndicated(json.loads(os.getenv('SYNDICATED_POSTS'))) +else: + action_log("You opted not to update your repo with the syndicate IDs of newly added posts") diff --git a/syndicate/utils.py b/syndicate/utils.py index d7b8c7c..044f6f8 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -100,3 +100,7 @@ def commit_silo_id(post, post_id, silo): frontmatter.dumps(fronted_post).encode('utf-8') ) action_log(pushed_change) + +def mark_as_syndicated(result_set): + action_log('marking!!!') + pass From d9c662b5cffec824d2bb8b8d44d62b6154f6ea4c Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sun, 2 Feb 2020 11:38:36 +0000 Subject: [PATCH 062/105] Rename 'commit_on_create' -> 'mark_as_syndicated' --- .github/workflows/alt.yml | 18 +++++++----------- action.yml | 4 ++-- entrypoint.py | 5 +++-- 3 files changed, 12 insertions(+), 15 deletions(-) diff --git a/.github/workflows/alt.yml b/.github/workflows/alt.yml index 656f40d..f8d5dc2 100644 --- a/.github/workflows/alt.yml +++ b/.github/workflows/alt.yml @@ -26,31 +26,27 @@ jobs: uses: dabrady/syndicate@develop with: silos: DEV - commit_on_create: true + mark_as_syndicated: true env: DEV_API_KEY: ${{ secrets.DEV_API_KEY }} - name: Report DEV.to results run: 'echo "${{ toJSON(steps.DEV.outputs) }}"' - - name: Push to Medium + - name: Push to Medium and Planet Mars uses: dabrady/syndicate@develop id: Medium with: - silos: Medium - - - name: Push to Mars - id: Planet_Mars - uses: dabrady/syndicate@develop - with: - silos: 
Planet_Mars + silos: | + Medium + Planet_Mars - name: Report results id: report run: - 'echo The results are in! "${{ toJSON(env.SYNDICATED_POSTS) }}"' + 'echo "The results are in! ${{ toJSON(env.SYNDICATED_POSTS) }}"' - name: Update posts with syndicate IDs uses: dabrady/syndicate@develop with: - commit_on_create: true + mark_as_syndicated: true diff --git a/action.yml b/action.yml index 1d305c7..1e29feb 100644 --- a/action.yml +++ b/action.yml @@ -3,9 +3,9 @@ description: 'Publish content to other places' inputs: silos: # id of input description: 'A list of names indicating the platforms to publish your content to.' - required: true + required: false default: '' - commit_on_create: + mark_as_syndicated: description: 'Set this to true to update the frontmatter of new posts with their syndicate IDs' required: false default: false diff --git a/entrypoint.py b/entrypoint.py index b24dc79..0c2f88a 100755 --- a/entrypoint.py +++ b/entrypoint.py @@ -13,7 +13,7 @@ action_inputs = { 'silos': os.getenv('INPUT_SILOS').splitlines(), - 'commit_on_create': json.loads(os.getenv('INPUT_COMMIT_ON_CREATE')) + 'mark_as_syndicated': json.loads(os.getenv('INPUT_MARK_AS_SYNDICATED')) } # Syndicate @@ -31,13 +31,14 @@ action_setenv('SYNDICATED_POSTS', json.dumps(syndicated_posts)) ## TODO commit up here using 'SYNDICATED_POSTS' or results -if action_inputs['commit_on_create']: +if action_inputs['mark_as_syndicated']: # NOTE In the special case where no silos were provided, commit all compiled results if action_inputs['silos']: action_log("marking most recent results") mark_as_syndicated(results) else: action_log("marking all results") + ## TODO fix null pointer JSON parsing mark_as_syndicated(json.loads(os.getenv('SYNDICATED_POSTS'))) else: action_log("You opted not to update your repo with the syndicate IDs of newly added posts") From 7aad2695066dbbee49c0fcfa8810bb78e7f5d847 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sun, 2 Feb 2020 13:07:18 +0000 Subject: [PATCH 063/105] 
Compress 'mark as syndicated' scaffolding fix tests (I should really get CI going) dummy change test commit --- entrypoint.py | 28 ++-------------------------- pages/posts/dumb.md | 7 +------ syndicate/__init__.py | 10 +++++++--- syndicate/utils.py | 21 ++++++++++++++++++--- tests/mocks.py | 9 +++------ tests/test_dev.py | 15 +++------------ 6 files changed, 34 insertions(+), 56 deletions(-) diff --git a/entrypoint.py b/entrypoint.py index 0c2f88a..58b5545 100755 --- a/entrypoint.py +++ b/entrypoint.py @@ -9,7 +9,7 @@ sys.path.insert(0, os.path.abspath(ACTION_SOURCE)) import syndicate -from syndicate.utils import action_log, action_output, action_setenv, mark_as_syndicated +from syndicate.utils import action_log, action_output, action_setenv action_inputs = { 'silos': os.getenv('INPUT_SILOS').splitlines(), @@ -17,28 +17,4 @@ } # Syndicate -results = syndicate.elsewhere(action_inputs['silos']) -action_output('syndicated_posts', results) - -if results: - # Compile results for future steps. 
- previous_results = os.getenv('SYNDICATED_POSTS') - if previous_results: - syndicated_posts = json.loads(previous_results) - syndicated_posts.update(results) - else: - syndicated_posts = results - action_setenv('SYNDICATED_POSTS', json.dumps(syndicated_posts)) - -## TODO commit up here using 'SYNDICATED_POSTS' or results -if action_inputs['mark_as_syndicated']: - # NOTE In the special case where no silos were provided, commit all compiled results - if action_inputs['silos']: - action_log("marking most recent results") - mark_as_syndicated(results) - else: - action_log("marking all results") - ## TODO fix null pointer JSON parsing - mark_as_syndicated(json.loads(os.getenv('SYNDICATED_POSTS'))) -else: - action_log("You opted not to update your repo with the syndicate IDs of newly added posts") +action_output('syndicated_posts', syndicate.elsewhere(**action_inputs)) diff --git a/pages/posts/dumb.md b/pages/posts/dumb.md index 2b77d38..79d52c1 100644 --- a/pages/posts/dumb.md +++ b/pages/posts/dumb.md @@ -1,6 +1 @@ ---- -dev_syndicate_id: 252963 -title: this is not a post ---- - -These are not the contents you're looking for. 
+new contents \ No newline at end of file diff --git a/syndicate/__init__.py b/syndicate/__init__.py index a31e25a..bb71ee1 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -1,16 +1,16 @@ from datetime import datetime -from syndicate.utils import action_log, action_warn, action_output, get_posts +from syndicate.utils import action_log, action_warn, action_output, get_posts, mark_syndicated_posts import sys import os import importlib.util -def elsewhere(silos): +def elsewhere(silos, mark_as_syndicated): posts = get_posts() if not posts: action_log("No posts added or updated, nothing to see here.") action_output("time", datetime.now()) - return + return None action_log(f"You want to publish to these places: {silos}") @@ -25,12 +25,16 @@ def elsewhere(silos): action_log(f"But I don't have API keys for these places: {[silo for (silo, available) in available_keys.items() if not available]}") results = {silo:_syndicate(spec, _get_api_key(silo), posts) for (silo,spec) in specs.items() if _has_api_key(silo)} + if mark_as_syndicated: + # NOTE In the special case where no silos were provided, commit all compiled results + mark_syndicated_posts(results, all=not bool(silos)) action_output("time", datetime.now()) return results else: action_warn("Sorry, can't help you.") action_output("time", datetime.now()) + return None ### privates ### _API_KEY = lambda s: f"{s.upper()}_API_KEY" diff --git a/syndicate/utils.py b/syndicate/utils.py index 044f6f8..f17b6e8 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -101,6 +101,21 @@ def commit_silo_id(post, post_id, silo): ) action_log(pushed_change) -def mark_as_syndicated(result_set): - action_log('marking!!!') - pass +def update_compiled_output(results): + if not results: + return + + # Compile results for future steps. 
+ previous_results = os.getenv('SYNDICATED_POSTS', results) + if previous_results != results: + syndicated_posts = json.loads(previous_results) + syndicated_posts.update(results) + action_setenv('SYNDICATED_POSTS', json.dumps(syndicated_posts)) + + +def mark_syndicated_posts(result_set, all=False): + assert results, "no results to mark as syndicated!" + action_log(f'marking {"all" if all else "most recent"}!!!') + + for (silo, result) in result_set.items(): + pass diff --git a/tests/mocks.py b/tests/mocks.py index 7f3f302..a0eb2d4 100644 --- a/tests/mocks.py +++ b/tests/mocks.py @@ -8,7 +8,7 @@ def __init__(self): self.raw_contents = textwrap.dedent( """ --- - dev_id: 42 + dev_syndicate_id: 42 title: A beautiful mock tags: beauty, fake --- @@ -16,8 +16,5 @@ def __init__(self): """).strip() self.front, _ = frontmatter.parse(self.raw_contents) self.decoded = self.raw_contents.encode('utf-8') - self.html_url = 'https://silo.com/a-beautiful-mock' - self.updated = False - - def update(self, *args, **kwargs): - self.updated = True + self.name = 'a-beautiful-mock.md' + self.html_url = f'https://silo.com/{self.name}' diff --git a/tests/test_dev.py b/tests/test_dev.py index 0f56645..14158be 100644 --- a/tests/test_dev.py +++ b/tests/test_dev.py @@ -1,3 +1,4 @@ +from syndicate.utils import id_for from syndicate.silos import dev from .mocks import MockPost import pytest @@ -58,16 +59,6 @@ def test_draft_returns_something_on_success(requests_mock, monkeypatch): json={ 'type_of': 'article', 'id': 42 }) assert dev._draft(MockPost(), api_key='fake_api_key') -def test_draft_updates_post_on_success(requests_mock, monkeypatch): - monkeypatch.setenv('GITHUB_REPOSITORY', 'herp/derp') - requests_mock.post( - "https://dev.to/api/articles", - status_code=requests.codes.created, - json={ 'type_of': 'article', 'id': 42 }) - mock = MockPost() - dev._draft(mock, api_key='fake_api_key') - assert mock.updated - def test_update_error_when_api_key_missing(): with pytest.raises(AssertionError): 
dev._update(MockPost()) @@ -80,7 +71,7 @@ def test_update_returns_nothing_when_request_fails(requests_mock, monkeypatch): monkeypatch.setenv('GITHUB_REPOSITORY', 'herp/derp') mock = MockPost() requests_mock.put( - f"https://dev.to/api/articles/{mock.front['dev_id']}", + f"https://dev.to/api/articles/{id_for(mock, dev.SILO)}", status_code=requests.codes.unprocessable_entity, json={"error": "you made an unintelligble request"}) assert not dev._update(mock, api_key='fake_api_key') @@ -88,7 +79,7 @@ def test_update_returns_nothing_when_request_fails(requests_mock, monkeypatch): def test_update_returns_something_on_success(requests_mock, monkeypatch): monkeypatch.setenv('GITHUB_REPOSITORY', 'herp/derp') mock = MockPost() - mock_id= mock.front['dev_id'] + mock_id= id_for(mock, dev.SILO) requests_mock.put( f"https://dev.to/api/articles/{mock_id}", status_code=requests.codes.ok, From e0458870476aa4bd6d4f3d3157418d69f5d2ffec Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sun, 2 Feb 2020 16:25:48 +0000 Subject: [PATCH 064/105] Add 'mark as syndicated' logic scaffolding --- entrypoint.py | 22 ++++++++++- syndicate/__init__.py | 18 ++++++--- syndicate/utils.py | 88 +++++++++++++++++++++++++++++++++++++------ 3 files changed, 109 insertions(+), 19 deletions(-) diff --git a/entrypoint.py b/entrypoint.py index 58b5545..66b0d1d 100755 --- a/entrypoint.py +++ b/entrypoint.py @@ -9,7 +9,7 @@ sys.path.insert(0, os.path.abspath(ACTION_SOURCE)) import syndicate -from syndicate.utils import action_log, action_output, action_setenv +from syndicate.utils import action_log, action_output, job_output, mark_syndicated_posts action_inputs = { 'silos': os.getenv('INPUT_SILOS').splitlines(), @@ -17,4 +17,22 @@ } # Syndicate -action_output('syndicated_posts', syndicate.elsewhere(**action_inputs)) +results = syndicate.elsewhere(action_inputs['silos']) +action_output('syndicated_posts', results) + +# Merge output with output of any previous runs +job_results_so_far = job_output(results) + +# 
Mark as syndicated +if mark_as_syndicated: + ## NOTE + # If silos were provided, commit only the results of this step. In the case + # where no silos were provided, commit all job results so far. + # + # This allows us to bundle sydications into as few or many commits as we + # want in our workflows. + ## + if action_inputs['silos']: + mark_syndicated_posts(results) + else: + mark_syndicated_posts(job_results_so_far) diff --git a/syndicate/__init__.py b/syndicate/__init__.py index bb71ee1..f331c4a 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -1,11 +1,16 @@ from datetime import datetime -from syndicate.utils import action_log, action_warn, action_output, get_posts, mark_syndicated_posts +from syndicate.utils import action_log, action_warn, action_output, get_posts import sys import os import importlib.util -def elsewhere(silos, mark_as_syndicated): +def elsewhere(silos): + if not silos: + action_log('No silos specified, nothing to see here.') + action_output("time", datetime.now()) + return None + posts = get_posts() if not posts: action_log("No posts added or updated, nothing to see here.") @@ -24,10 +29,11 @@ def elsewhere(silos, mark_as_syndicated): if not all(available_keys.values()): action_log(f"But I don't have API keys for these places: {[silo for (silo, available) in available_keys.items() if not available]}") - results = {silo:_syndicate(spec, _get_api_key(silo), posts) for (silo,spec) in specs.items() if _has_api_key(silo)} - if mark_as_syndicated: - # NOTE In the special case where no silos were provided, commit all compiled results - mark_syndicated_posts(results, all=not bool(silos)) + results = { + silo:_syndicate(spec, _get_api_key(silo), posts) + for (silo,spec) in specs.items() + if _has_api_key(silo) + } action_output("time", datetime.now()) return results diff --git a/syndicate/utils.py b/syndicate/utils.py index f17b6e8..139745c 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -2,6 +2,7 @@ import functools import 
github3 import os +import requests ### Github Action utilities ### def action_log(msg): @@ -86,6 +87,7 @@ def id_for(post, silo): assert silo, "missing silo" return fronted(post).get(f'{silo}_syndicate_id') # TODO extract this template +# @DEPRECATED, DELETEME def commit_silo_id(post, post_id, silo): assert post, "missing post info" assert post_id, "missing post ID" @@ -101,21 +103,85 @@ def commit_silo_id(post, post_id, silo): ) action_log(pushed_change) -def update_compiled_output(results): - if not results: - return +def job_output(results): + assert results, "no results to compile!" # Compile results for future steps. - previous_results = os.getenv('SYNDICATED_POSTS', results) - if previous_results != results: - syndicated_posts = json.loads(previous_results) + syndicated_posts = results + if 'SYNDICATED_POSTS' in os.environ: + syndicated_posts = json.loads(os.getenv('SYNDICATED_POSTS')) syndicated_posts.update(results) action_setenv('SYNDICATED_POSTS', json.dumps(syndicated_posts)) + return syndicated_posts +def mark_syndicated_posts(result_set): + assert result_set, "no results to mark as syndicated!" + action_log('marking!!!') -def mark_syndicated_posts(result_set, all=False): - assert results, "no results to mark as syndicated!" 
- action_log(f'marking {"all" if all else "most recent"}!!!') + for (silo, results) in result_set.items(): + if results['added']: + action_log(f"TODO mark these for {silo}: {results['added']}") + else: + action_log(f"No new posts syndicated to {silo}") - for (silo, result) in result_set.items(): - pass +def commit_post_changes(new_contents_by_post_path): + assert os.getenv("GITHUB_TOKEN"), "GITHUB_TOKEN not available" + assert os.getenv("GITHUB_REPOSITORY"), "GITHUB_REPOSITORY not available" + assert os.getenv("GITHUB_SHA"), "GITHUB_SHA not available" + assert os.getenv("GITHUB_REF"), "GITHUB_REF not available" + parent_sha = os.getenv("GITHUB_SHA") + + ## NOTE + # Following the recipe outlined here for creating a commit consisting of + # multiple file updates: + # https://developer.github.com/v3/git/ + # + # 1. Get the current commit object + # 2. Retrieve the tree it points to + # 3. Retrieve the content of the blob object that tree has for that + # particular file path + # 4. Change the content somehow and post a new blob object with that new + # content, getting a blob SHA back + # 5. Post a new tree object with that file path pointer replaced with your + # new blob SHA getting a tree SHA back + # 6. Create a new commit object with the current commit SHA as the parent + # and the new tree SHA, getting a commit SHA back + # 7. Update the reference of your branch to point to the new commit SHA + ## + + # Create new blobs in the repo's Git database containing the updated contents of our posts. + new_blobs_by_post = {path:repo().create_blob(new_contents, 'utf-8') for (path, new_contents) in new_contents_by_post_path.items()} + # Create a new tree with our updated blobs for the post paths. 
+ new_tree = repo().create_tree( + [ + { + 'path': path, + 'mode': '100644', # 'file', @see https://developer.github.com/v3/git/trees/#tree-object + 'type': 'blob', + 'sha': blob_sha + } + for (path, blob_sha) in new_blobs_by_post.items() + ], + base_tree=parent_sha + ) + # NOTE The github3 package I'm using apparently doesn't support updating refs -_- + # Hand-rolling my own using the Github API directly. + # @see https://developer.github.com/v3/ + headers ={ + 'Authorization': f"token {os.getenv('GITHUB_TOKEN')}", + 'Accept': 'application/vnd.github.v3+json' + } + endpoint = f'https://api.github.com/repos/{os.getenv("GITHUB_REPOSITORY")}/git/{os.getenv("GITHUB_REF")}' + data = { + 'sha': repo().create_commit( + 'test commit', + new_tree.sha, + [parent_sha] + ).sha + } + response = requests.put(endpoint, headers=headers, json=data) + if response.status_code == requests.codes.ok: + return response.json() + else: + action_error(f"Failed to mark syndicated posts: {response.json()}") + return None From 36148ee074b41e011856568d6eaa2bcbec1f4757 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sun, 2 Feb 2020 16:42:35 +0000 Subject: [PATCH 065/105] Shortcircuit when no results fix missing util dep fix input access error real!!!!! dummy edit to trigger rebuild dummy edit to trigger rebuild of entrypoint default job_output to empty set just skip marking if nothing to do trigger syndication fix null result issue i think this werks fix missing dep trigger alt simplify comp to fix only trigger on post change triggerrrrr pyhonn....... tirger dump post WITH frontmatter don't mark posts for silo if already marked tragercaoesutnh whoops, need to lower silo and safe get trager test commit fix id extraction readability delete id, will trigger new draft test commit generate better commit message update workflow ref SHA if commit pooshed try again! 
syndicate: adding syndicate IDs to post frontmatter add some debug logiing try agan syndicate: adding syndicate IDs to post frontmatter use action-specific env var instead; github is overruling trigerring syndicate: adding syndicate IDs to post frontmatter use correct target SHA everywhere and again syndicate: adding syndicate IDs to post frontmatter debuggos tragir syndicate: adding syndicate IDs to post frontmatter rudimentary 'prevent commit if nothing changed' again syndicate: adding syndicate IDs to post frontmatter delete obsolete commit logic remove debug statements update automated commit message verify edits cleanup trugger (syndicate): adding syndicate IDs to post frontmatter frontmatter seems to chomp trailing newlines trig the flow fix outdated example step Revert "frontmatter seems to chomp trailing newlines" This reverts commit d59f5fdb2792e8081dd616807076c07c793c1b66. try try try again (syndicate): adding syndicate IDs to post frontmatter remove todo; todone protect against empty result set from silo update DEV silo name to match expected input add todo clean up entrypoint a bit cleanup output handling test cleanups --- .github/workflows/alt.yml | 6 +- entrypoint.py | 68 ++++++++++++--- pages/posts/dumb.md | 6 +- syndicate/__init__.py | 15 +--- syndicate/silos/dev.py | 12 +-- syndicate/utils.py | 179 ++++++++++++++++++++------------------ tests/test_dev.py | 6 +- 7 files changed, 171 insertions(+), 121 deletions(-) diff --git a/.github/workflows/alt.yml b/.github/workflows/alt.yml index f8d5dc2..1a30784 100644 --- a/.github/workflows/alt.yml +++ b/.github/workflows/alt.yml @@ -4,8 +4,8 @@ on: branches: - master - develop - # paths: - # - 'pages/**/*.mdx?' + paths: + - 'pages/**/*.mdx?' jobs: syndicate: @@ -44,7 +44,7 @@ jobs: - name: Report results id: report run: - 'echo "The results are in! ${{ toJSON(env.SYNDICATED_POSTS) }}"' + 'echo "The results are in! 
${{ toJSON(env.SYNDICATE_POSTS) }}"' - name: Update posts with syndicate IDs uses: dabrady/syndicate@develop diff --git a/entrypoint.py b/entrypoint.py index 66b0d1d..95c0d4a 100755 --- a/entrypoint.py +++ b/entrypoint.py @@ -1,15 +1,16 @@ #!/usr/bin/env python3 +from datetime import datetime import json import os import sys -# NOTE This is where our action module lives in the container. +# NOTE This is where our action module lives in the container # TODO Is there a way to manipulate the path from Dockerfile? ACTION_SOURCE='/action' sys.path.insert(0, os.path.abspath(ACTION_SOURCE)) import syndicate -from syndicate.utils import action_log, action_output, job_output, mark_syndicated_posts +from syndicate.utils import action_log, action_setoutput, job_getoutput, job_setoutput, get_posts, fronted, mark_syndicated_posts action_inputs = { 'silos': os.getenv('INPUT_SILOS').splitlines(), @@ -17,14 +18,35 @@ } # Syndicate -results = syndicate.elsewhere(action_inputs['silos']) -action_output('syndicated_posts', results) +posts = get_posts() +if not posts: + action_log("No posts added or updated, nothing to see here.") + action_setoutput("time", datetime.now()) + sys.exit() + + +# Result set format: +# { +# '': { +# 'added': { +# 'post/path': , +# ... +# }, +# 'modified': { +# 'post/path': , +# ... +# } +# }, +# ... +# } +syndicated_posts = syndicate.elsewhere(posts, action_inputs['silos']) or {} +action_setoutput("time", datetime.now()) +action_setoutput('syndicated_posts', syndicated_posts) # Merge output with output of any previous runs -job_results_so_far = job_output(results) +job_setoutput(syndicated_posts) -# Mark as syndicated -if mark_as_syndicated: +if action_inputs['mark_as_syndicated']: ## NOTE # If silos were provided, commit only the results of this step. In the case # where no silos were provided, commit all job results so far. @@ -32,7 +54,31 @@ # This allows us to bundle sydications into as few or many commits as we # want in our workflows. 
## - if action_inputs['silos']: - mark_syndicated_posts(results) - else: - mark_syndicated_posts(job_results_so_far) + if not action_inputs['silos']: + syndicated_posts = job_getoutput() + + # Just focus on the added ones. + indexed_paths_by_silo = { + silo: results['added'] + for (silo, results) in syndicated_posts.items() + if results + } + + # { + # 'path/to/post': { + # 'dev': 42, + # 'medium': 'abc123', + # ... + # }, + # ... + # } + syndicate_ids_by_path = {} + for (silo, indexed_paths) in indexed_paths_by_silo.items(): + for (path, id) in indexed_paths.items(): + syndicate_ids_by_path.setdefault(path, {}) + syndicate_ids_by_path[path][silo] = id + + mark_syndicated_posts( + syndicate_ids_by_path, + {post.path:fronted(post) for post in posts} + ) diff --git a/pages/posts/dumb.md b/pages/posts/dumb.md index 79d52c1..a96f040 100644 --- a/pages/posts/dumb.md +++ b/pages/posts/dumb.md @@ -1 +1,5 @@ -new contents \ No newline at end of file +--- +title: this is now a post +--- + +contents are here!! for realsies. 
diff --git a/syndicate/__init__.py b/syndicate/__init__.py index f331c4a..bd357fc 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -1,20 +1,12 @@ -from datetime import datetime -from syndicate.utils import action_log, action_warn, action_output, get_posts +from syndicate.utils import action_log, action_warn import sys import os import importlib.util -def elsewhere(silos): +def elsewhere(posts, silos): if not silos: action_log('No silos specified, nothing to see here.') - action_output("time", datetime.now()) - return None - - posts = get_posts() - if not posts: - action_log("No posts added or updated, nothing to see here.") - action_output("time", datetime.now()) return None action_log(f"You want to publish to these places: {silos}") @@ -29,17 +21,16 @@ def elsewhere(silos): if not all(available_keys.values()): action_log(f"But I don't have API keys for these places: {[silo for (silo, available) in available_keys.items() if not available]}") + # TODO just operate on silos for which we have API keys results = { silo:_syndicate(spec, _get_api_key(silo), posts) for (silo,spec) in specs.items() if _has_api_key(silo) } - action_output("time", datetime.now()) return results else: action_warn("Sorry, can't help you.") - action_output("time", datetime.now()) return None ### privates ### diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 031e5e4..97b7674 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -1,14 +1,14 @@ -from syndicate.utils import action_log_group, action_log, action_warn, action_error, get_canonical_url, yaml_sequence, fronted, id_for, commit_silo_id +from syndicate.utils import action_log_group, action_log, action_warn, action_error, get_canonical_url, yaml_sequence, fronted, syndicate_id_for import requests -SILO = 'dev' +SILO_NAME = 'DEV' -@action_log_group(SILO) +@action_log_group(SILO_NAME) def syndicate(posts, api_key): action_log("Hello? 
Yes, this is DEV.") return { - 'added': [id for id in (_draft(post, api_key) for post in posts if not id_for(post, SILO)) if id], - 'modified': [id for id in (_update(post, api_key) for post in posts if id_for(post, SILO)) if id] + 'added': {post.path:_draft(post, api_key) for post in posts if not syndicate_id_for(post, SILO_NAME)}, + 'modified': {post.path:_update(post, api_key) for post in posts if syndicate_id_for(post, SILO_NAME)} } ### privates ### @@ -65,7 +65,7 @@ def _update(post, api_key=None): assert api_key, "missing API key" assert post, "missing post" - endpoint = f'https://dev.to/api/articles/{id_for(post, SILO)}' + endpoint = f'https://dev.to/api/articles/{syndicate_id_for(post, SILO_NAME)}' headers = {'api-key': api_key} payload = {'article': { 'body_markdown': post.decoded.decode('utf-8') } } response = requests.put(endpoint, headers=headers, json=payload) diff --git a/syndicate/utils.py b/syndicate/utils.py index 139745c..20d465c 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -1,6 +1,7 @@ import frontmatter import functools import github3 +import json import os import requests @@ -17,9 +18,6 @@ def action_warn(msg): def action_error(msg): print(f"::error::{msg}") -def action_output(key, value): - print(f"::set-output name={key}::{value}") - def action_log_group(title): def _decorator(func): def _wrapper(*args, **kwargs): @@ -33,6 +31,20 @@ def _wrapper(*args, **kwargs): def action_setenv(key, value): print(f"::set-env name={key}::{value}") +def action_setoutput(key, value): + print(f"::set-output name={key}::{value}") + +def job_setoutput(results): + # Compile results for future steps + syndicated_posts = results + if 'SYNDICATE_POSTS' in os.environ: + syndicated_posts = job_getoutput() + syndicated_posts.update(results) + action_setenv('SYNDICATE_POSTS', json.dumps(syndicated_posts)) + +def job_getoutput(): + return json.loads(os.getenv('SYNDICATE_POSTS', '{}')) + # Memoize authentication @functools.lru_cache(maxsize=1) def repo(): 
@@ -42,17 +54,22 @@ def repo(): gh = github3.login(token=os.getenv("GITHUB_TOKEN")) return gh.repository(*os.getenv("GITHUB_REPOSITORY").split('/')) -def get_commit_payload(): +## NOTE +## This action may generate a new commit, so we need to be sure we're always +## using the proper SHA. +def target_sha(): assert os.getenv("GITHUB_SHA"), "GITHUB_SHA not available" - return repo().commit(os.getenv("GITHUB_SHA")).files + return os.getenv('SYNDICATE_SHA', os.getenv("GITHUB_SHA")) + +def get_commit_payload(): + return repo().commit(target_sha()).files def file_contents(filename): - assert os.getenv("GITHUB_SHA"), "GITHUB_SHA not available" - return repo().file_contents(filename, os.getenv("GITHUB_SHA")) + return repo().file_contents(filename, target_sha()) def get_posts(post_dir=os.getenv('SYNDICATE_POST_DIR', 'posts')): files = get_commit_payload() - assert files, "commit had no files in its payload" + assert files, "target commit was empty" posts = [file for file in files if file['filename'].startswith(post_dir)] if not posts: @@ -79,79 +96,67 @@ def yaml_sequence(sequence): def fronted(post): assert post, "missing post" + if type(post) == frontmatter.Post: + return post raw_contents = post.decoded.decode('utf-8') return frontmatter.loads(raw_contents) -def id_for(post, silo): +def syndicate_key_for(silo): + return f'{silo.lower()}_syndicate_id' + +def syndicate_id_for(post, silo): assert post, "missing post" assert silo, "missing silo" - return fronted(post).get(f'{silo}_syndicate_id') # TODO extract this template - -# @DEPRECATED, DELETEME -def commit_silo_id(post, post_id, silo): - assert post, "missing post info" - assert post_id, "missing post ID" - assert silo, "silo not specified" - - fronted_post = fronted(post) - fronted_post[f'{silo}_syndicate_id'] = post_id - - action_log(f"Updating frontmatter with ID for {silo}") - pushed_change = post.update( - f'syndicate({silo}): adding post ID to frontmatter', - frontmatter.dumps(fronted_post).encode('utf-8') - ) - 
action_log(pushed_change) - -def job_output(results): - assert results, "no results to compile!" - - # Compile results for future steps. - syndicated_posts = results - if 'SYNDICATED_POSTS' in os.environ: - syndicated_posts = json.loads(os.getenv('SYNDICATED_POSTS')) - syndicated_posts.update(results) - action_setenv('SYNDICATED_POSTS', json.dumps(syndicated_posts)) - return syndicated_posts - -def mark_syndicated_posts(result_set): - assert result_set, "no results to mark as syndicated!" - action_log('marking!!!') - - for (silo, results) in result_set.items(): - if results['added']: - action_log(f"TODO mark these for {silo}: {results['added']}") - else: - action_log(f"No new posts syndicated to {silo}") - -def commit_post_changes(new_contents_by_post_path): + return fronted(post).get(syndicate_key_for(silo)) + +def mark_syndicated_posts(siloed_ids_by_path, fronted_posts_by_path): + updated_fronted_posts_by_path = {} + for (path, siloed_ids) in siloed_ids_by_path.items(): + fronted_post = fronted_posts_by_path[path] + syndicate_ids = { + syndicate_key_for(silo):sid + for (silo, sid) in siloed_ids.items() + if not syndicate_id_for(fronted_post, silo) # ignore already marked posts + } + # Create new fronted post with old frontmatter merged with syndicate IDs. + updated_post = frontmatter.Post(**dict(fronted_post.to_dict(), **syndicate_ids)) + + # Only update if anything changed. + if updated_post.keys() != fronted_post.keys(): + updated_fronted_posts_by_path[path] = updated_post + commit_post_changes(updated_fronted_posts_by_path) + +## NOTE +# Following the recipe outlined here for creating a commit consisting of +# multiple file updates: +# https://developer.github.com/v3/git/ +# +# 1. Get the current commit object +# 2. Retrieve the tree it points to +# 3. Retrieve the content of the blob object that tree has for that +# particular file path +# 4. Change the content somehow and post a new blob object with that new +# content, getting a blob SHA back +# 5. 
Post a new tree object with that file path pointer replaced with your +# new blob SHA getting a tree SHA back +# 6. Create a new commit object with the current commit SHA as the parent +# and the new tree SHA, getting a commit SHA back +# 7. Update the reference of your branch to point to the new commit SHA +## +def commit_post_changes(fronted_posts_by_path): + if not fronted_posts_by_path: + return None assert os.getenv("GITHUB_TOKEN"), "GITHUB_TOKEN not available" assert os.getenv("GITHUB_REPOSITORY"), "GITHUB_REPOSITORY not available" - assert os.getenv("GITHUB_SHA"), "GITHUB_SHA not available" assert os.getenv("GITHUB_REF"), "GITHUB_REF not available" - parent_sha = os.getenv("GITHUB_SHA") - - ## NOTE - # Following the recipe outlined here for creating a commit consisting of - # multiple file updates: - # https://developer.github.com/v3/git/ - # - # 1. Get the current commit object - # 2. Retrieve the tree it points to - # 3. Retrieve the content of the blob object that tree has for that - # particular file path - # 4. Change the content somehow and post a new blob object with that new - # content, getting a blob SHA back - # 5. Post a new tree object with that file path pointer replaced with your - # new blob SHA getting a tree SHA back - # 6. Create a new commit object with the current commit SHA as the parent - # and the new tree SHA, getting a commit SHA back - # 7. Update the reference of your branch to point to the new commit SHA - ## # Create new blobs in the repo's Git database containing the updated contents of our posts. - new_blobs_by_post = {path:repo().create_blob(new_contents, 'utf-8') for (path, new_contents) in new_contents_by_post_path.items()} - # Create a new tree with our updated blobs for the post paths. + new_blobs_by_path = { + path:repo().create_blob(frontmatter.dumps(fronted_post), 'utf-8') + for (path, fronted_post) in fronted_posts_by_path.items() + } + parent_sha = target_sha() + # Create a new tree with our updated blobs. 
new_tree = repo().create_tree( [ { @@ -160,27 +165,31 @@ def commit_post_changes(new_contents_by_post_path): 'type': 'blob', 'sha': blob_sha } - for (path, blob_sha) in new_blobs_by_post.items() + for (path, blob_sha) in new_blobs_by_path.items() ], base_tree=parent_sha ) + + # Update the parent tree with our new subtree. # NOTE The github3 package I'm using apparently doesn't support updating refs -_- # Hand-rolling my own using the Github API directly. # @see https://developer.github.com/v3/ - headers ={ - 'Authorization': f"token {os.getenv('GITHUB_TOKEN')}", - 'Accept': 'application/vnd.github.v3+json' - } - endpoint = f'https://api.github.com/repos/{os.getenv("GITHUB_REPOSITORY")}/git/{os.getenv("GITHUB_REF")}' - data = { - 'sha': repo().create_commit( - 'test commit', - new_tree.sha, - [parent_sha] - ).sha - } - response = requests.put(endpoint, headers=headers, json=data) + new_commit = repo().create_commit( + f'(syndicate): adding syndicate IDs to post frontmatter', + new_tree.sha, + [parent_sha] + ) + response = requests.put( + f'https://api.github.com/repos/{os.getenv("GITHUB_REPOSITORY")}/git/{os.getenv("GITHUB_REF")}', + headers={ + 'Authorization': f"token {os.getenv('GITHUB_TOKEN')}", + 'Accept': 'application/vnd.github.v3+json' + }, + json={'sha': new_commit.sha} + ) if response.status_code == requests.codes.ok: + ## NOTE Need to update the reference SHA for future workflow steps. 
+ action_setenv('SYNDICATE_SHA', new_commit.sha) return response.json() else: action_error(f"Failed to mark syndicated posts: {response.json()}") diff --git a/tests/test_dev.py b/tests/test_dev.py index 14158be..ff0ed48 100644 --- a/tests/test_dev.py +++ b/tests/test_dev.py @@ -1,4 +1,4 @@ -from syndicate.utils import id_for +from syndicate.utils import syndicate_id_for from syndicate.silos import dev from .mocks import MockPost import pytest @@ -71,7 +71,7 @@ def test_update_returns_nothing_when_request_fails(requests_mock, monkeypatch): monkeypatch.setenv('GITHUB_REPOSITORY', 'herp/derp') mock = MockPost() requests_mock.put( - f"https://dev.to/api/articles/{id_for(mock, dev.SILO)}", + f"https://dev.to/api/articles/{syndicate_id_for(mock, dev.SILO_NAME)}", status_code=requests.codes.unprocessable_entity, json={"error": "you made an unintelligble request"}) assert not dev._update(mock, api_key='fake_api_key') @@ -79,7 +79,7 @@ def test_update_returns_nothing_when_request_fails(requests_mock, monkeypatch): def test_update_returns_something_on_success(requests_mock, monkeypatch): monkeypatch.setenv('GITHUB_REPOSITORY', 'herp/derp') mock = MockPost() - mock_id= id_for(mock, dev.SILO) + mock_id= syndicate_id_for(mock, dev.SILO_NAME) requests_mock.put( f"https://dev.to/api/articles/{mock_id}", status_code=requests.codes.ok, From 72c7d7e65556972772c2cfa1e0ae9737c5d2a021 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Mon, 3 Feb 2020 13:13:53 +0000 Subject: [PATCH 066/105] Add placeholder adapter for Medium dummy cleanup to trigger recompilation (syndicate): adding syndicate IDs to post frontmatter test mock medium adaptor test commit at the end (syndicate): adding syndicate IDs to post frontmatter add logging to confirm (syndicate): adding syndicate IDs to post frontmatter --- .github/workflows/alt.yml | 4 +++- entrypoint.py | 5 ++++- pages/posts/dumb.md | 3 ++- syndicate/silos/dev.py | 2 +- syndicate/silos/medium.py | 7 +++++++ syndicate/utils.py | 14 +++++++------- 6 
files changed, 24 insertions(+), 11 deletions(-) create mode 100644 syndicate/silos/medium.py diff --git a/.github/workflows/alt.yml b/.github/workflows/alt.yml index 1a30784..4616691 100644 --- a/.github/workflows/alt.yml +++ b/.github/workflows/alt.yml @@ -26,7 +26,6 @@ jobs: uses: dabrady/syndicate@develop with: silos: DEV - mark_as_syndicated: true env: DEV_API_KEY: ${{ secrets.DEV_API_KEY }} @@ -40,6 +39,9 @@ jobs: silos: | Medium Planet_Mars + env: + MEDIUM_API_KEY: "fake" + PLANET_MARS_API_KEY: "fake" - name: Report results id: report diff --git a/entrypoint.py b/entrypoint.py index 95c0d4a..fe6369d 100755 --- a/entrypoint.py +++ b/entrypoint.py @@ -20,7 +20,7 @@ # Syndicate posts = get_posts() if not posts: - action_log("No posts added or updated, nothing to see here.") + action_log("No posts added or updated, nothing to see here...") action_setoutput("time", datetime.now()) sys.exit() @@ -47,6 +47,7 @@ job_setoutput(syndicated_posts) if action_inputs['mark_as_syndicated']: + action_log("Marking newly syndicated posts...") ## NOTE # If silos were provided, commit only the results of this step. In the case # where no silos were provided, commit all job results so far. @@ -82,3 +83,5 @@ syndicate_ids_by_path, {post.path:fronted(post) for post in posts} ) + + action_log("Done.") diff --git a/pages/posts/dumb.md b/pages/posts/dumb.md index a96f040..88f5785 100644 --- a/pages/posts/dumb.md +++ b/pages/posts/dumb.md @@ -1,5 +1,6 @@ --- +dev_syndicate_id: 254196 title: this is now a post --- -contents are here!! for realsies. +contents are here!! for realsies. \ No newline at end of file diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 97b7674..cb86e35 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -5,7 +5,7 @@ @action_log_group(SILO_NAME) def syndicate(posts, api_key): - action_log("Hello? Yes, this is DEV.") + action_log(f"Hello? 
Yes, this is {SILO_NAME}.") return { 'added': {post.path:_draft(post, api_key) for post in posts if not syndicate_id_for(post, SILO_NAME)}, 'modified': {post.path:_update(post, api_key) for post in posts if syndicate_id_for(post, SILO_NAME)} diff --git a/syndicate/silos/medium.py b/syndicate/silos/medium.py new file mode 100644 index 0000000..2716599 --- /dev/null +++ b/syndicate/silos/medium.py @@ -0,0 +1,7 @@ +from syndicate.utils import action_log_group, action_warn + +SILO_NAME = 'Medium' +@action_log_group(SILO_NAME) +def syndicate(posts, api_key): + action_warn("not yet implemented") + pass diff --git a/syndicate/utils.py b/syndicate/utils.py index 20d465c..3b901d2 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -48,8 +48,8 @@ def job_getoutput(): # Memoize authentication @functools.lru_cache(maxsize=1) def repo(): - assert os.getenv("GITHUB_TOKEN"), "GITHUB_TOKEN not available" - assert os.getenv("GITHUB_REPOSITORY"), "GITHUB_REPOSITORY not available" + assert os.getenv("GITHUB_TOKEN"), "missing GITHUB_TOKEN" + assert os.getenv("GITHUB_REPOSITORY"), "missing GITHUB_REPOSITORY" gh = github3.login(token=os.getenv("GITHUB_TOKEN")) return gh.repository(*os.getenv("GITHUB_REPOSITORY").split('/')) @@ -58,7 +58,7 @@ def repo(): ## This action may generate a new commit, so we need to be sure we're always ## using the proper SHA. 
def target_sha(): - assert os.getenv("GITHUB_SHA"), "GITHUB_SHA not available" + assert os.getenv("GITHUB_SHA"), "missing GITHUB_SHA" return os.getenv('SYNDICATE_SHA', os.getenv("GITHUB_SHA")) def get_commit_payload(): @@ -79,7 +79,7 @@ def get_posts(post_dir=os.getenv('SYNDICATE_POST_DIR', 'posts')): return [file_contents(post['filename']) for post in posts] def get_canonical_url(post): - assert os.getenv("GITHUB_REPOSITORY"), "GITHUB_REPOSITORY not available" + assert os.getenv("GITHUB_REPOSITORY"), "missing GITHUB_REPOSITORY" # return f"https://github.com/{os.getenv('GITHUB_REPOSITORY')}/{post.path}" return post.html_url @@ -146,9 +146,9 @@ def mark_syndicated_posts(siloed_ids_by_path, fronted_posts_by_path): def commit_post_changes(fronted_posts_by_path): if not fronted_posts_by_path: return None - assert os.getenv("GITHUB_TOKEN"), "GITHUB_TOKEN not available" - assert os.getenv("GITHUB_REPOSITORY"), "GITHUB_REPOSITORY not available" - assert os.getenv("GITHUB_REF"), "GITHUB_REF not available" + assert os.getenv("GITHUB_TOKEN"), "missing GITHUB_TOKEN" + assert os.getenv("GITHUB_REPOSITORY"), "missing GITHUB_REPOSITORY" + assert os.getenv("GITHUB_REF"), "missing GITHUB_REF" # Create new blobs in the repo's Git database containing the updated contents of our posts. 
new_blobs_by_path = { From ee985c1f2a1c5a5607372a9b5898acd869486c90 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Mon, 3 Feb 2020 16:00:35 +0000 Subject: [PATCH 067/105] Polish action definition --- action.yml | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/action.yml b/action.yml index 1e29feb..6371ab1 100644 --- a/action.yml +++ b/action.yml @@ -1,20 +1,25 @@ name: 'Syndicate' -description: 'Publish content to other places' +description: 'Publish your content elsewhere (P.O.S.S.E)' +branding: + icon: rss + color: red + +runs: + using: 'docker' + image: 'Dockerfile' + inputs: - silos: # id of input - description: 'A list of names indicating the platforms to publish your content to.' + silos: + description: 'A list of platforms to update with your new/changed content' required: false default: '' mark_as_syndicated: - description: 'Set this to true to update the frontmatter of new posts with their syndicate IDs' + description: 'Commit syndicate IDs to the frontmatter of newly syndicated posts for synchronization purposes' required: false default: false + outputs: - # TODO Change this to a map of publish times keyed by silo - time: # id of output + time: description: 'The time this action finished' syndicated_posts: - description: 'A JSON object reporting the results of syndicating to the specified silos.' 
-runs: - using: 'docker' - image: 'Dockerfile' + description: 'A JSON object mapping silos to the posts that were added or modified on those platforms' From a096bd13f504dcad06552a035dfa23605c3d4f4c Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Mon, 3 Feb 2020 16:01:20 +0000 Subject: [PATCH 068/105] Polish alternate workflow --- .github/workflows/alt.yml | 53 ++++++++++++++++++++++++++------------- 1 file changed, 35 insertions(+), 18 deletions(-) diff --git a/.github/workflows/alt.yml b/.github/workflows/alt.yml index 4616691..3fc6961 100644 --- a/.github/workflows/alt.yml +++ b/.github/workflows/alt.yml @@ -1,9 +1,10 @@ -name: "An alternate way to syndicate" +name: Example syndicate flow on: push: + # Watch these branches... branches: - master - - develop + # ...for changes to these files. paths: - 'pages/**/*.mdx?' @@ -17,38 +18,54 @@ jobs: # @see https://help.github.com/en/actions/automating-your-workflow-with-github-actions/authenticating-with-the-github_token GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - # Tell me the path (relative to the project root) where your posts live, so I can find them. - # Defaults to 'posts'. + # Tell me the path (relative to the project root) where your content lives + # so I can find them in the commit that triggered this flow. + # Defaults to 'posts' if not specified. SYNDICATE_POST_DIR: 'pages/posts' + steps: + # This step will syndicate your content to DEV.to, if supported, but will + # NOT commit the platform-specific content IDs back to your repo. This is + # useful for bundling the results of multiple actions into a single commit. + # + # However, note that future syndications to DEV.to will be unaware of the + # drafts already created by this step and will result in duplicate drafts. + # + # To avoid this, include a future step which does not specify any silos, but + # sets the `mark_as_syndicated` flag to true. This will commit the syndicate + # IDs that have not already been added during this job. 
- name: Push to DEV.to - id: DEV uses: dabrady/syndicate@develop with: silos: DEV env: DEV_API_KEY: ${{ secrets.DEV_API_KEY }} - - name: Report DEV.to results - run: 'echo "${{ toJSON(steps.DEV.outputs) }}"' - - - name: Push to Medium and Planet Mars + # This step will syndicate your content to Medium and Planet Mars, if + # supported, and will commit any new platform-specific content IDs to the + # appropriate files. + # + # Note that this will NOT commit IDs generated by previous actions, just + # the ones from this action. + - name: Push to Medium and Planet Mars and commit new IDs uses: dabrady/syndicate@develop - id: Medium with: silos: | Medium Planet_Mars + mark_as_syndicated: true env: - MEDIUM_API_KEY: "fake" - PLANET_MARS_API_KEY: "fake" - - - name: Report results - id: report - run: - 'echo "The results are in! ${{ toJSON(env.SYNDICATE_POSTS) }}"' + MEDIUM_API_KEY: ${{ secrets.MEDIUM_API_KEY }} + PLANET_MARS_API_KEY: ${{ secrets.MARS_API_KEY }} - - name: Update posts with syndicate IDs + # This step will not syndicate any content to any silos, but instead will + # commit any new platform-specific content IDs generated by previous steps + # in this job to the appropriate files, if they haven't already been added. + # + # It's a nice way of bundling the results of multiple steps into a single + # commit, or just cleaning up at the end of a job and you didn't miss + # anything. 
+ - name: Commit IDs of newly syndicated posts if necessary uses: dabrady/syndicate@develop with: mark_as_syndicated: true From 2ba6c4bc20c5f1b90fa62d926d8c90934300ea67 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Mon, 3 Feb 2020 16:06:59 +0000 Subject: [PATCH 069/105] Delete original flow; alt is better --- .github/workflows/main.yml | 45 -------------------------------------- 1 file changed, 45 deletions(-) delete mode 100644 .github/workflows/main.yml diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml deleted file mode 100644 index 170038d..0000000 --- a/.github/workflows/main.yml +++ /dev/null @@ -1,45 +0,0 @@ -on: - push: - branches: - - master - ## Working on alternate flow for now - # - develop - paths: - - 'pages/**/*.mdx?' - -jobs: - syndicate: - runs-on: ubuntu-latest - name: Example syndicate workflow - steps: - - name: Syndicate to silos - id: syndicate - uses: dabrady/syndicate@develop - with: - # Using a YAML multiline string as a workaround for a list argument. - # @see https://github.community/t5/GitHub-Actions/Can-action-inputs-be-arrays/td-p/33776 - # Names are snake_case, case-insensitive. - silos: | - DEV - Medium - Planet_Mars - env: - # This is provided to all actions by Github, and needed to access the posts - # in your repository. - # @see https://help.github.com/en/actions/automating-your-workflow-with-github-actions/authenticating-with-the-github_token - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - # Tell me the path (relative to the project root) where your posts live, so I can find them. - # Defaults to 'posts'. - SYNDICATE_POST_DIR: 'pages/posts' - # Set necessary API keys as secrets of your repo and specify them here in this format: - # - # _API_KEY - # - # e.g. - # - # MEDIUM_API_KEY - # - # so that the action can find them easily when needed. 
- DEV_API_KEY: ${{ secrets.DEV_API_KEY }} - - name: Get the output time - run: echo "The time was ${{ steps.syndicate.outputs.time }}" From acec696bb75848b4958ae637dcf755d31c590d54 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Mon, 3 Feb 2020 17:00:03 +0000 Subject: [PATCH 070/105] Rename alt flow to example re-enable dev mode improve entrypoint documentation cleanup 'mark_syndicated_posts' --- .github/workflows/{alt.yml => example.yml} | 1 + entrypoint.py | 13 +++++---- pages/posts/dumb.md | 2 +- syndicate/utils.py | 31 ++++++++++++++-------- 4 files changed, 30 insertions(+), 17 deletions(-) rename .github/workflows/{alt.yml => example.yml} (99%) diff --git a/.github/workflows/alt.yml b/.github/workflows/example.yml similarity index 99% rename from .github/workflows/alt.yml rename to .github/workflows/example.yml index 3fc6961..d7d4057 100644 --- a/.github/workflows/alt.yml +++ b/.github/workflows/example.yml @@ -4,6 +4,7 @@ on: # Watch these branches... branches: - master + - develop # ...for changes to these files. paths: - 'pages/**/*.mdx?' diff --git a/entrypoint.py b/entrypoint.py index fe6369d..ac30bb2 100755 --- a/entrypoint.py +++ b/entrypoint.py @@ -24,16 +24,15 @@ action_setoutput("time", datetime.now()) sys.exit() - # Result set format: # { # '': { # 'added': { -# 'post/path': , +# 'path/to/new_post': , # ... # }, # 'modified': { -# 'post/path': , +# 'path/to/updated_post': , # ... # } # }, @@ -65,10 +64,14 @@ if results } + if not indexed_paths_by_silo: + action_log("Nothing new to mark.") + sys.exit() + # { # 'path/to/post': { - # 'dev': 42, - # 'medium': 'abc123', + # '': 42, + # '': 'abc123', # ... # }, # ... diff --git a/pages/posts/dumb.md b/pages/posts/dumb.md index 88f5785..6fedc71 100644 --- a/pages/posts/dumb.md +++ b/pages/posts/dumb.md @@ -3,4 +3,4 @@ dev_syndicate_id: 254196 title: this is now a post --- -contents are here!! for realsies. \ No newline at end of file +contents are here! for realsies. 
diff --git a/syndicate/utils.py b/syndicate/utils.py index 3b901d2..506c0b2 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -109,22 +109,31 @@ def syndicate_id_for(post, silo): assert silo, "missing silo" return fronted(post).get(syndicate_key_for(silo)) -def mark_syndicated_posts(siloed_ids_by_path, fronted_posts_by_path): +def mark_syndicated_posts(syndicate_ids_by_path, fronted_posts_by_path): updated_fronted_posts_by_path = {} - for (path, siloed_ids) in siloed_ids_by_path.items(): + for (path, syndicate_ids_by_silo) in syndicate_ids_by_path.items(): fronted_post = fronted_posts_by_path[path] - syndicate_ids = { + + # Format: + # { + # 'silo_a_syndicate_id': 42, + # 'silo_b_syndicate_id': 'abc123', + # ... + # } + new_syndicate_ids = { syndicate_key_for(silo):sid - for (silo, sid) in siloed_ids.items() - if not syndicate_id_for(fronted_post, silo) # ignore already marked posts + for (silo, sid) in syndicate_ids_by_silo.items() + # Ignore already posts already marked with this silo + if not syndicate_id_for(fronted_post, silo) } - # Create new fronted post with old frontmatter merged with syndicate IDs. - updated_post = frontmatter.Post(**dict(fronted_post.to_dict(), **syndicate_ids)) + # Only add to commit if there're any new IDs to add. + if not new_syndicate_ids: + continue - # Only update if anything changed. - if updated_post.keys() != fronted_post.keys(): - updated_fronted_posts_by_path[path] = updated_post - commit_post_changes(updated_fronted_posts_by_path) + # Create new fronted post with old frontmatter merged with syndicate IDs. 
+ updated_post = frontmatter.Post(**dict(fronted_post.to_dict(), **new_syndicate_ids)) + updated_fronted_posts_by_path[path] = updated_post + return commit_post_changes(updated_fronted_posts_by_path) ## NOTE # Following the recipe outlined here for creating a commit consisting of From 4e0a243f3ec5823a8383dfdc76648e64ebaa9df4 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Mon, 3 Feb 2020 17:30:52 +0000 Subject: [PATCH 071/105] Ignore deleted files clarify documentation --- syndicate/utils.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/syndicate/utils.py b/syndicate/utils.py index 506c0b2..4b5fffb 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -45,7 +45,7 @@ def job_setoutput(results): def job_getoutput(): return json.loads(os.getenv('SYNDICATE_POSTS', '{}')) -# Memoize authentication +# Memoize authentication and repo fetching. @functools.lru_cache(maxsize=1) def repo(): assert os.getenv("GITHUB_TOKEN"), "missing GITHUB_TOKEN" @@ -55,7 +55,7 @@ def repo(): return gh.repository(*os.getenv("GITHUB_REPOSITORY").split('/')) ## NOTE -## This action may generate a new commit, so we need to be sure we're always +## Our action may generate a new commit, so we need to be sure we're always ## using the proper SHA. def target_sha(): assert os.getenv("GITHUB_SHA"), "missing GITHUB_SHA" @@ -75,8 +75,8 @@ def get_posts(post_dir=os.getenv('SYNDICATE_POST_DIR', 'posts')): if not posts: return None else: - # Don't care about the Git status: it might not be in sync with the silo - return [file_contents(post['filename']) for post in posts] + # Ignore deleted files. 
+ return [file_contents(post['filename']) for post in posts if post['status'] != 'deleted'] def get_canonical_url(post): assert os.getenv("GITHUB_REPOSITORY"), "missing GITHUB_REPOSITORY" From 7b28057c9a7a349fcc653f6d19c2ab596e4db3fb Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Mon, 3 Feb 2020 17:38:38 +0000 Subject: [PATCH 072/105] Testing out simplified DEV drafting (syndicate): adding syndicate IDs to post frontmatter --- pages/posts/dumb.md | 4 ++-- syndicate/silos/dev.py | 29 +++++++++-------------------- 2 files changed, 11 insertions(+), 22 deletions(-) diff --git a/pages/posts/dumb.md b/pages/posts/dumb.md index 6fedc71..fb420d5 100644 --- a/pages/posts/dumb.md +++ b/pages/posts/dumb.md @@ -1,6 +1,6 @@ --- -dev_syndicate_id: 254196 +dev_syndicate_id: 254362 title: this is now a post --- -contents are here! for realsies. +contents are here! for realsies. \ No newline at end of file diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index cb86e35..2ff2ca2 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -47,9 +47,16 @@ def _fetch(post_id=None, api_key=None): def _draft(post, api_key=None): assert api_key, "missing API key" assert post, "missing post" + assert fronted(post).get('title'), "article is missing a title" - payload = _payload_for(post) - + payload = { + 'article': { + # NOTE This can be overridden by explicitly setting 'published' in + # the frontmatter. 
+ 'published': False, + 'body_markdown': post.decoded.decode('utf-8') + } + } endpoint = "https://dev.to/api/articles" headers = {'api-key': api_key} response = requests.post(endpoint, headers=headers, json=payload) @@ -75,21 +82,3 @@ def _update(post, api_key=None): else: results = response.json() return results['id'] - -def _payload_for(post): - assert post, "missing post" - - fronted_post = fronted(post) - assert fronted_post.get('title'), "article is missing a title" - - # TODO test if can be accomplished by just sending raw contents as body_markdown - return { - 'article': { - 'title': fronted_post['title'], - 'published': False, - 'tags': yaml_sequence(fronted_post.get('tags', [])), - 'series': fronted_post.get('series', None), - 'canonical_url': get_canonical_url(post), - 'body_markdown': fronted_post.content - } - } From 068482f2320adf47914f7b41f7b9840ab8a0a357 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Mon, 3 Feb 2020 17:53:09 +0000 Subject: [PATCH 073/105] Include silos in commit list trigger new commit (syndicate): adding IDs for {'DEV'} trigger update on DEV, and (fake) draft to Medium --- pages/posts/dumb.md | 4 ++-- syndicate/silos/medium.py | 8 ++++++-- syndicate/utils.py | 8 +++++--- 3 files changed, 13 insertions(+), 7 deletions(-) diff --git a/pages/posts/dumb.md b/pages/posts/dumb.md index fb420d5..a08ac53 100644 --- a/pages/posts/dumb.md +++ b/pages/posts/dumb.md @@ -1,6 +1,6 @@ --- -dev_syndicate_id: 254362 +dev_syndicate_id: 254424 title: this is now a post --- -contents are here! for realsies. \ No newline at end of file +contents are here! 
for realsies diff --git a/syndicate/silos/medium.py b/syndicate/silos/medium.py index 2716599..159b681 100644 --- a/syndicate/silos/medium.py +++ b/syndicate/silos/medium.py @@ -1,7 +1,11 @@ -from syndicate.utils import action_log_group, action_warn +from syndicate.utils import action_log_group, action_warn, syndicate_id_for SILO_NAME = 'Medium' @action_log_group(SILO_NAME) def syndicate(posts, api_key): action_warn("not yet implemented") - pass + action_warn("using mock data for testing") + + return { + 'added': {post.path:4 for post in posts if not syndicate_id_for(post, SILO_NAME)} + } diff --git a/syndicate/utils.py b/syndicate/utils.py index 4b5fffb..c952f92 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -111,6 +111,7 @@ def syndicate_id_for(post, silo): def mark_syndicated_posts(syndicate_ids_by_path, fronted_posts_by_path): updated_fronted_posts_by_path = {} + silos = set() for (path, syndicate_ids_by_silo) in syndicate_ids_by_path.items(): fronted_post = fronted_posts_by_path[path] @@ -133,7 +134,8 @@ def mark_syndicated_posts(syndicate_ids_by_path, fronted_posts_by_path): # Create new fronted post with old frontmatter merged with syndicate IDs. updated_post = frontmatter.Post(**dict(fronted_post.to_dict(), **new_syndicate_ids)) updated_fronted_posts_by_path[path] = updated_post - return commit_post_changes(updated_fronted_posts_by_path) + silos.update(syndicate_ids_by_silo.keys()) + return commit_post_changes(updated_fronted_posts_by_path, silos) ## NOTE # Following the recipe outlined here for creating a commit consisting of @@ -152,7 +154,7 @@ def mark_syndicated_posts(syndicate_ids_by_path, fronted_posts_by_path): # and the new tree SHA, getting a commit SHA back # 7. 
Update the reference of your branch to point to the new commit SHA ## -def commit_post_changes(fronted_posts_by_path): +def commit_post_changes(fronted_posts_by_path, silos): if not fronted_posts_by_path: return None assert os.getenv("GITHUB_TOKEN"), "missing GITHUB_TOKEN" @@ -184,7 +186,7 @@ def commit_post_changes(fronted_posts_by_path): # Hand-rolling my own using the Github API directly. # @see https://developer.github.com/v3/ new_commit = repo().create_commit( - f'(syndicate): adding syndicate IDs to post frontmatter', + f'(syndicate): adding IDs for {silos}', new_tree.sha, [parent_sha] ) From c2edcec1ec62c08ee78b4c735a59e19d52a7e163 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Mon, 3 Feb 2020 18:46:25 +0000 Subject: [PATCH 074/105] Compress syndication --- syndicate/__init__.py | 45 ++++++++++++++++++++----------------------- 1 file changed, 21 insertions(+), 24 deletions(-) diff --git a/syndicate/__init__.py b/syndicate/__init__.py index bd357fc..b3a8554 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -1,8 +1,9 @@ from syndicate.utils import action_log, action_warn -import sys -import os +import functools import importlib.util +import os +import sys def elsewhere(posts, silos): if not silos: @@ -11,33 +12,32 @@ def elsewhere(posts, silos): action_log(f"You want to publish to these places: {silos}") - specs = {silo:_locate(silo) for silo in silos} - recognized_silos = {silo:spec for (silo,spec) in specs.items() if spec} - available_keys = {silo:_has_api_key(silo) for silo in recognized_silos.keys()} - - if recognized_silos and any(available_keys.values()): - action_log(f"I know how to publish to these places: {list(recognized_silos.keys())}") - action_log("I'll do what I can.") - if not all(available_keys.values()): - action_log(f"But I don't have API keys for these places: {[silo for (silo, available) in available_keys.items() if not available]}") - - # TODO just operate on silos for which we have API keys - results = { - 
silo:_syndicate(spec, _get_api_key(silo), posts) - for (silo,spec) in specs.items() - if _has_api_key(silo) - } - + specs = {silo:_locate(silo) for silo in silos if _locate(silo)} + if specs.keys() != silos: + action_warn(f"I don't know how to publish to these places: { [silo for silo in silos if silo not in specs] }") + + api_keys = {silo:_get_api_key(silo) for silo in silos if _get_api_key(silo)} + if api_keys.keys() != silos: + action_warn(f"I don't have API keys for these places: { [silo for silo in silos if silo not in api_keys] }") + + action_log("I'll do what I can.") + results = { + silo:_syndicate(spec, api_keys[silo], posts) + for (silo, spec) in specs.items() + if silo in api_keys + } + if results: return results else: - action_warn("Sorry, can't help you.") + action_warn("Sorry, can't do anything with that.") return None ### privates ### _API_KEY = lambda s: f"{s.upper()}_API_KEY" +@functools.lru_cache(max_size=10) def _locate(silo): - return importlib.util.find_spec(f'syndicate.silos.{silo.lower()}') + return importlib.util.find_spec(f'syndicate.silos.{silo.lower()}').getattr('name', None) def _syndicate(silo_spec, api_key, posts): if silo_spec and api_key: @@ -45,8 +45,5 @@ def _syndicate(silo_spec, api_key, posts): else: return None -def _has_api_key(silo): - return _API_KEY(silo) in os.environ - def _get_api_key(silo): return os.getenv(_API_KEY(silo)) From 8e9ec4f3edd232b1f3e4c6bb73fdc5071c805a8f Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Mon, 3 Feb 2020 18:48:51 +0000 Subject: [PATCH 075/105] De-dupe silo input list let's trigger that again gahhhh uhhhhhhnnnnggggghhh ermehgerdwutwhodoesthat for reals noap ima ijit le poosh why is the code staleeeee -_- (syndicate): adding IDs for {'Medium'} why aren't they lists add loggings (syndicate): adding IDs for {'Medium'} try pretty printing DEV results --- .github/workflows/example.yml | 2 +- entrypoint.py | 2 -- pages/posts/dumb.md | 3 ++- syndicate/__init__.py | 14 ++++++++------ 
syndicate/silos/dev.py | 6 +++++- syndicate/utils.py | 2 ++ 6 files changed, 18 insertions(+), 11 deletions(-) diff --git a/.github/workflows/example.yml b/.github/workflows/example.yml index d7d4057..865770b 100644 --- a/.github/workflows/example.yml +++ b/.github/workflows/example.yml @@ -56,7 +56,7 @@ jobs: Planet_Mars mark_as_syndicated: true env: - MEDIUM_API_KEY: ${{ secrets.MEDIUM_API_KEY }} + MEDIUM_API_KEY: 'fake_key' #${{ secrets.MEDIUM_API_KEY }} PLANET_MARS_API_KEY: ${{ secrets.MARS_API_KEY }} # This step will not syndicate any content to any silos, but instead will diff --git a/entrypoint.py b/entrypoint.py index ac30bb2..4cc1aad 100755 --- a/entrypoint.py +++ b/entrypoint.py @@ -86,5 +86,3 @@ syndicate_ids_by_path, {post.path:fronted(post) for post in posts} ) - - action_log("Done.") diff --git a/pages/posts/dumb.md b/pages/posts/dumb.md index a08ac53..b7b1e32 100644 --- a/pages/posts/dumb.md +++ b/pages/posts/dumb.md @@ -1,6 +1,7 @@ --- dev_syndicate_id: 254424 +medium_syndicate_id: 4 title: this is now a post --- -contents are here! for realsies +contents are here! for realsies? for realsies. -__- \ No newline at end of file diff --git a/syndicate/__init__.py b/syndicate/__init__.py index b3a8554..f2cd46c 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -10,14 +10,16 @@ def elsewhere(posts, silos): action_log('No silos specified, nothing to see here.') return None + # De-dupe. 
+ silos = list(set(silos)) action_log(f"You want to publish to these places: {silos}") specs = {silo:_locate(silo) for silo in silos if _locate(silo)} - if specs.keys() != silos: + if list(specs.keys()) != silos: action_warn(f"I don't know how to publish to these places: { [silo for silo in silos if silo not in specs] }") api_keys = {silo:_get_api_key(silo) for silo in silos if _get_api_key(silo)} - if api_keys.keys() != silos: + if list(api_keys.keys()) != silos: action_warn(f"I don't have API keys for these places: { [silo for silo in silos if silo not in api_keys] }") action_log("I'll do what I can.") @@ -29,19 +31,19 @@ def elsewhere(posts, silos): if results: return results else: - action_warn("Sorry, can't do anything with that.") + action_warn("Sorry, can't do anything with that!") return None ### privates ### _API_KEY = lambda s: f"{s.upper()}_API_KEY" -@functools.lru_cache(max_size=10) +@functools.lru_cache(maxsize=10) def _locate(silo): - return importlib.util.find_spec(f'syndicate.silos.{silo.lower()}').getattr('name', None) + return getattr(importlib.util.find_spec(f'syndicate.silos.{silo.lower()}'), 'name', None) def _syndicate(silo_spec, api_key, posts): if silo_spec and api_key: - return importlib.import_module(silo_spec.name).syndicate(posts, api_key) + return importlib.import_module(silo_spec).syndicate(posts, api_key) else: return None diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 2ff2ca2..115abce 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -1,15 +1,19 @@ from syndicate.utils import action_log_group, action_log, action_warn, action_error, get_canonical_url, yaml_sequence, fronted, syndicate_id_for import requests +import pprint SILO_NAME = 'DEV' @action_log_group(SILO_NAME) def syndicate(posts, api_key): action_log(f"Hello? 
Yes, this is {SILO_NAME}.") - return { + results = { 'added': {post.path:_draft(post, api_key) for post in posts if not syndicate_id_for(post, SILO_NAME)}, 'modified': {post.path:_update(post, api_key) for post in posts if syndicate_id_for(post, SILO_NAME)} } + action_log("The results are in:") + action_log(pprint.pformat(results)) + return results ### privates ### diff --git a/syndicate/utils.py b/syndicate/utils.py index c952f92..aa2d80b 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -156,6 +156,7 @@ def mark_syndicated_posts(syndicate_ids_by_path, fronted_posts_by_path): ## def commit_post_changes(fronted_posts_by_path, silos): if not fronted_posts_by_path: + action_log("All good: nothing to change.") return None assert os.getenv("GITHUB_TOKEN"), "missing GITHUB_TOKEN" assert os.getenv("GITHUB_REPOSITORY"), "missing GITHUB_REPOSITORY" @@ -201,6 +202,7 @@ def commit_post_changes(fronted_posts_by_path, silos): if response.status_code == requests.codes.ok: ## NOTE Need to update the reference SHA for future workflow steps. 
action_setenv('SYNDICATE_SHA', new_commit.sha) + action_log("Syndicate posts marked.") return response.json() else: action_error(f"Failed to mark syndicated posts: {response.json()}") From 3582c1ebdee46491205b50b4e3984464cd069821 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Mon, 3 Feb 2020 19:20:46 +0000 Subject: [PATCH 076/105] Add new fake silo, but don't configure API key let's give it a try (syndicate): adding IDs for {'Medium'} (syndicate): adding IDs for {'Medium', 'DEV'} --- .github/workflows/example.yml | 1 + pages/posts/new_dumb.md | 7 +++++++ syndicate/silos/reddit.py | 11 +++++++++++ 3 files changed, 19 insertions(+) create mode 100644 pages/posts/new_dumb.md create mode 100644 syndicate/silos/reddit.py diff --git a/.github/workflows/example.yml b/.github/workflows/example.yml index 865770b..56ee510 100644 --- a/.github/workflows/example.yml +++ b/.github/workflows/example.yml @@ -53,6 +53,7 @@ jobs: with: silos: | Medium + Reddit Planet_Mars mark_as_syndicated: true env: diff --git a/pages/posts/new_dumb.md b/pages/posts/new_dumb.md new file mode 100644 index 0000000..7ccf69c --- /dev/null +++ b/pages/posts/new_dumb.md @@ -0,0 +1,7 @@ +--- +dev_syndicate_id: 254528 +medium_syndicate_id: 4 +title: a real post +--- + +whatevs \ No newline at end of file diff --git a/syndicate/silos/reddit.py b/syndicate/silos/reddit.py new file mode 100644 index 0000000..73b4fa2 --- /dev/null +++ b/syndicate/silos/reddit.py @@ -0,0 +1,11 @@ +from syndicate.utils import action_log_group, action_warn, syndicate_id_for + +SILO_NAME = 'Reddit' +@action_log_group(SILO_NAME) +def syndicate(posts, api_key): + action_warn("not yet implemented") + action_warn("using mock data for testing") + + return { + 'added': {post.path:13 for post in posts if not syndicate_id_for(post, SILO_NAME)} + } From 8015bb3d2d465d9b6840739a931bdfe8184087c4 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Mon, 3 Feb 2020 19:37:34 +0000 Subject: [PATCH 077/105] Make commit message more accurate 
rerun (syndicate): adding IDs for {'Medium'} and agin (syndicate): adding IDs for {'Medium'} (syndicate): adding IDs for {'DEV'} --- pages/posts/new_dumb.md | 2 +- syndicate/utils.py | 18 +++++++++--------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/pages/posts/new_dumb.md b/pages/posts/new_dumb.md index 7ccf69c..9a2860a 100644 --- a/pages/posts/new_dumb.md +++ b/pages/posts/new_dumb.md @@ -1,5 +1,5 @@ --- -dev_syndicate_id: 254528 +dev_syndicate_id: 254535 medium_syndicate_id: 4 title: a real post --- diff --git a/syndicate/utils.py b/syndicate/utils.py index aa2d80b..d02d0a2 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -111,7 +111,7 @@ def syndicate_id_for(post, silo): def mark_syndicated_posts(syndicate_ids_by_path, fronted_posts_by_path): updated_fronted_posts_by_path = {} - silos = set() + silos_included = set() for (path, syndicate_ids_by_silo) in syndicate_ids_by_path.items(): fronted_post = fronted_posts_by_path[path] @@ -121,12 +121,13 @@ def mark_syndicated_posts(syndicate_ids_by_path, fronted_posts_by_path): # 'silo_b_syndicate_id': 'abc123', # ... # } - new_syndicate_ids = { - syndicate_key_for(silo):sid - for (silo, sid) in syndicate_ids_by_silo.items() + new_syndicate_ids = {} + for (silo, sid) in syndicate_ids_by_silo.items(): # Ignore already posts already marked with this silo - if not syndicate_id_for(fronted_post, silo) - } + if not syndicate_id_for(fronted_post, silo): + new_syndicate_ids[syndicate_key_for(silo)] = sid + silos_included.add(silo) + # Only add to commit if there're any new IDs to add. if not new_syndicate_ids: continue @@ -134,8 +135,7 @@ def mark_syndicated_posts(syndicate_ids_by_path, fronted_posts_by_path): # Create new fronted post with old frontmatter merged with syndicate IDs. 
updated_post = frontmatter.Post(**dict(fronted_post.to_dict(), **new_syndicate_ids)) updated_fronted_posts_by_path[path] = updated_post - silos.update(syndicate_ids_by_silo.keys()) - return commit_post_changes(updated_fronted_posts_by_path, silos) + return commit_post_changes(updated_fronted_posts_by_path, silos_included) ## NOTE # Following the recipe outlined here for creating a commit consisting of @@ -156,7 +156,7 @@ def mark_syndicated_posts(syndicate_ids_by_path, fronted_posts_by_path): ## def commit_post_changes(fronted_posts_by_path, silos): if not fronted_posts_by_path: - action_log("All good: nothing to change.") + action_log("All good: already marked.") return None assert os.getenv("GITHUB_TOKEN"), "missing GITHUB_TOKEN" assert os.getenv("GITHUB_REPOSITORY"), "missing GITHUB_REPOSITORY" From 51d80fe4ef45d87581ceefa234d84cfacc0c97a9 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Mon, 3 Feb 2020 20:01:00 +0000 Subject: [PATCH 078/105] Cleanup example flow remove newline --- .github/workflows/example.yml | 5 ++--- syndicate/silos/dev.py | 1 - 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/example.yml b/.github/workflows/example.yml index 56ee510..cc23cc3 100644 --- a/.github/workflows/example.yml +++ b/.github/workflows/example.yml @@ -4,7 +4,6 @@ on: # Watch these branches... branches: - master - - develop # ...for changes to these files. paths: - 'pages/**/*.mdx?' @@ -51,13 +50,13 @@ jobs: - name: Push to Medium and Planet Mars and commit new IDs uses: dabrady/syndicate@develop with: + # Use a multiline YAML string to specify multiple silos. 
silos: | Medium - Reddit Planet_Mars mark_as_syndicated: true env: - MEDIUM_API_KEY: 'fake_key' #${{ secrets.MEDIUM_API_KEY }} + MEDIUM_API_KEY: ${{ secrets.MEDIUM_API_KEY }} PLANET_MARS_API_KEY: ${{ secrets.MARS_API_KEY }} # This step will not syndicate any content to any silos, but instead will diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 115abce..7de1221 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -3,7 +3,6 @@ import pprint SILO_NAME = 'DEV' - @action_log_group(SILO_NAME) def syndicate(posts, api_key): action_log(f"Hello? Yes, this is {SILO_NAME}.") From e0c8ce9922bb451a449a1b47ee02ec7d22353171 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Mon, 3 Feb 2020 20:02:53 +0000 Subject: [PATCH 079/105] Delete test files --- pages/posts/dumb.md | 7 ------- pages/posts/new_dumb.md | 7 ------- syndicate/silos/medium.py | 11 ----------- syndicate/silos/reddit.py | 11 ----------- 4 files changed, 36 deletions(-) delete mode 100644 pages/posts/dumb.md delete mode 100644 pages/posts/new_dumb.md delete mode 100644 syndicate/silos/medium.py delete mode 100644 syndicate/silos/reddit.py diff --git a/pages/posts/dumb.md b/pages/posts/dumb.md deleted file mode 100644 index b7b1e32..0000000 --- a/pages/posts/dumb.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -dev_syndicate_id: 254424 -medium_syndicate_id: 4 -title: this is now a post ---- - -contents are here! for realsies? for realsies. 
-__- \ No newline at end of file diff --git a/pages/posts/new_dumb.md b/pages/posts/new_dumb.md deleted file mode 100644 index 9a2860a..0000000 --- a/pages/posts/new_dumb.md +++ /dev/null @@ -1,7 +0,0 @@ ---- -dev_syndicate_id: 254535 -medium_syndicate_id: 4 -title: a real post ---- - -whatevs \ No newline at end of file diff --git a/syndicate/silos/medium.py b/syndicate/silos/medium.py deleted file mode 100644 index 159b681..0000000 --- a/syndicate/silos/medium.py +++ /dev/null @@ -1,11 +0,0 @@ -from syndicate.utils import action_log_group, action_warn, syndicate_id_for - -SILO_NAME = 'Medium' -@action_log_group(SILO_NAME) -def syndicate(posts, api_key): - action_warn("not yet implemented") - action_warn("using mock data for testing") - - return { - 'added': {post.path:4 for post in posts if not syndicate_id_for(post, SILO_NAME)} - } diff --git a/syndicate/silos/reddit.py b/syndicate/silos/reddit.py deleted file mode 100644 index 73b4fa2..0000000 --- a/syndicate/silos/reddit.py +++ /dev/null @@ -1,11 +0,0 @@ -from syndicate.utils import action_log_group, action_warn, syndicate_id_for - -SILO_NAME = 'Reddit' -@action_log_group(SILO_NAME) -def syndicate(posts, api_key): - action_warn("not yet implemented") - action_warn("using mock data for testing") - - return { - 'added': {post.path:13 for post in posts if not syndicate_id_for(post, SILO_NAME)} - } From d70561f26f46a8b9247be0f20c90ffc905b537b3 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Mon, 3 Feb 2020 20:08:25 +0000 Subject: [PATCH 080/105] Delete unused DEV feature: 'fetch' --- syndicate/silos/dev.py | 29 ----------------------------- tests/test_dev.py | 30 ------------------------------ 2 files changed, 59 deletions(-) diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 7de1221..6a98f4a 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -18,35 +18,6 @@ def syndicate(posts, api_key): ## This is a simple semantic wrapper around the DEV API, currently in beta. 
-# NOTE Not currently used -def _fetch(post_id=None, api_key=None): - assert api_key, "missing API key" - - headers = {'api-key': api_key} - if post_id: - # Fetch data for given post ID - ## NOTE Currently, there's no way to fetch data for a specific post. - ## The workaround I'm using here is the best we can do: fetch and search. - endpoint = "https://dev.to/api/articles/me/all" - post_data = None - page = 0 - while not post_data: - page += 1 - response = requests.get(endpoint, params={ 'page': page }, headers=headers) - response.raise_for_status() # raise error if bad request - posts = response.json() - if posts: - post_data = next((data for data in posts if data['id'] == post_id), None) - else: - break; # No more posts to fetch - return post_data - else: - # Fetch all post data - endpoint = "https://dev.to/api/articles/me/all" - response = requests.get(endpoint, headers=headers) - response.raise_for_status() # raise error if bad request - return response.json() - def _draft(post, api_key=None): assert api_key, "missing API key" assert post, "missing post" diff --git a/tests/test_dev.py b/tests/test_dev.py index ff0ed48..99ad26d 100644 --- a/tests/test_dev.py +++ b/tests/test_dev.py @@ -4,36 +4,6 @@ import pytest import requests import requests_mock -import re - -def test_fetch_error_when_api_key_missing(): - with pytest.raises(AssertionError): - dev._fetch() - -def test_fetch_request_all_posts(requests_mock): - fake_results = [] - requests_mock.get("https://dev.to/api/articles/me/all", json=fake_results) - results = dev._fetch(api_key='fake_api_key') - assert results == fake_results - -def test_fetch_request_specific_post(requests_mock): - fake_post_id = 13 - requests_mock.get("https://dev.to/api/articles/me/all", json=[{'id':fake_post_id}]) - results = dev._fetch(fake_post_id, api_key='fake_api_key') - assert results['id'] == fake_post_id - - -def test_fetch_request_invalid_post(requests_mock): - invalid_post_id = 13 - def fake_results(req, con): - # Ugh, query 
string parsing. But they don't expose the params at the top-level, so.... - if int( re.search(r'page=(\d+)', req.query).group(1) ) == 1: - return [{"id": invalid_post_id + 1}] - else: - return [] - requests_mock.get("https://dev.to/api/articles/me/all", json=fake_results) - results = dev._fetch(invalid_post_id, api_key='fake_api_key') - assert results is None def test_draft_error_when_api_key_missing(): with pytest.raises(AssertionError): From 8a6e794fdf3130e93da3e3343bcd7c67c21fa704 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Mon, 3 Feb 2020 20:10:35 +0000 Subject: [PATCH 081/105] Compress 'get_posts' --- syndicate/utils.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/syndicate/utils.py b/syndicate/utils.py index d02d0a2..3bafba1 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -72,11 +72,8 @@ def get_posts(post_dir=os.getenv('SYNDICATE_POST_DIR', 'posts')): assert files, "target commit was empty" posts = [file for file in files if file['filename'].startswith(post_dir)] - if not posts: - return None - else: - # Ignore deleted files. - return [file_contents(post['filename']) for post in posts if post['status'] != 'deleted'] + # Ignore deleted files. 
+ return [file_contents(post['filename']) for post in posts if post['status'] != 'deleted'] def get_canonical_url(post): assert os.getenv("GITHUB_REPOSITORY"), "missing GITHUB_REPOSITORY" From 8a2c5368cc08cfa36a7b8dc8548a2fb88f446d6f Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Mon, 3 Feb 2020 20:13:07 +0000 Subject: [PATCH 082/105] Delete unused utilities --- syndicate/silos/dev.py | 2 +- syndicate/utils.py | 16 ---------------- 2 files changed, 1 insertion(+), 17 deletions(-) diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 6a98f4a..be31335 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -1,4 +1,4 @@ -from syndicate.utils import action_log_group, action_log, action_warn, action_error, get_canonical_url, yaml_sequence, fronted, syndicate_id_for +from syndicate.utils import action_log_group, action_log, action_error, fronted, syndicate_id_for import requests import pprint diff --git a/syndicate/utils.py b/syndicate/utils.py index 3bafba1..e9654c3 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -75,22 +75,6 @@ def get_posts(post_dir=os.getenv('SYNDICATE_POST_DIR', 'posts')): # Ignore deleted files. 
return [file_contents(post['filename']) for post in posts if post['status'] != 'deleted'] -def get_canonical_url(post): - assert os.getenv("GITHUB_REPOSITORY"), "missing GITHUB_REPOSITORY" - # return f"https://github.com/{os.getenv('GITHUB_REPOSITORY')}/{post.path}" - return post.html_url - -def yaml_sequence(sequence): - JUST_GIVE_IT_BACK = lambda s: s - cases = { - # Support simple comma-separated YAML sequences - type(''): lambda s: [item.strip() for item in sequence.split(',')], - # If the YAML sequence has already been processed into a list, just give it back - type([]): JUST_GIVE_IT_BACK - } - # If I know how to handle it, handle it; otherwise, just give it back - return cases.get(type(sequence), JUST_GIVE_IT_BACK)(sequence) - def fronted(post): assert post, "missing post" if type(post) == frontmatter.Post: From e600897e48423410847d77ba10e7cb1d9c5ae97e Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Mon, 3 Feb 2020 20:21:28 +0000 Subject: [PATCH 083/105] This is not necessary --- .github/workflows/example.yml | 5 +++-- entrypoint.py | 6 ------ 2 files changed, 3 insertions(+), 8 deletions(-) diff --git a/.github/workflows/example.yml b/.github/workflows/example.yml index cc23cc3..5efc214 100644 --- a/.github/workflows/example.yml +++ b/.github/workflows/example.yml @@ -4,9 +4,10 @@ on: # Watch these branches... branches: - master + - develop # ...for changes to these files. - paths: - - 'pages/**/*.mdx?' + # paths: + # - 'pages/**/*.mdx?' jobs: syndicate: diff --git a/entrypoint.py b/entrypoint.py index 4cc1aad..a980516 100755 --- a/entrypoint.py +++ b/entrypoint.py @@ -3,12 +3,6 @@ import json import os import sys - -# NOTE This is where our action module lives in the container -# TODO Is there a way to manipulate the path from Dockerfile? 
-ACTION_SOURCE='/action' -sys.path.insert(0, os.path.abspath(ACTION_SOURCE)) - import syndicate from syndicate.utils import action_log, action_setoutput, job_getoutput, job_setoutput, get_posts, fronted, mark_syndicated_posts From 326d4f5d33e36167519a2ccc8f7b05bfbf649c06 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Mon, 3 Feb 2020 21:03:43 +0000 Subject: [PATCH 084/105] minor cleanings --- entrypoint.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/entrypoint.py b/entrypoint.py index a980516..5739030 100755 --- a/entrypoint.py +++ b/entrypoint.py @@ -11,13 +11,13 @@ 'mark_as_syndicated': json.loads(os.getenv('INPUT_MARK_AS_SYNDICATED')) } -# Syndicate posts = get_posts() if not posts: - action_log("No posts added or updated, nothing to see here...") + action_log("No posts added or updated, nothing to do.") action_setoutput("time", datetime.now()) sys.exit() +# Syndicate # Result set format: # { # '': { @@ -33,9 +33,7 @@ # ... # } syndicated_posts = syndicate.elsewhere(posts, action_inputs['silos']) or {} -action_setoutput("time", datetime.now()) action_setoutput('syndicated_posts', syndicated_posts) - # Merge output with output of any previous runs job_setoutput(syndicated_posts) @@ -45,7 +43,7 @@ # If silos were provided, commit only the results of this step. In the case # where no silos were provided, commit all job results so far. # - # This allows us to bundle sydications into as few or many commits as we + # This allows us to bundle syndications into as few or many commits as we # want in our workflows. 
## if not action_inputs['silos']: @@ -80,3 +78,5 @@ syndicate_ids_by_path, {post.path:fronted(post) for post in posts} ) + +action_setoutput("time", datetime.now()) From 7566517e644c253b41608aa7113661304f7bd6c4 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Tue, 4 Feb 2020 11:44:40 +0000 Subject: [PATCH 085/105] Document utilities --- entrypoint.py | 4 +- syndicate/utils.py | 148 +++++++++++++++++++++++++++++++++------------ 2 files changed, 112 insertions(+), 40 deletions(-) diff --git a/entrypoint.py b/entrypoint.py index 5739030..f8ca514 100755 --- a/entrypoint.py +++ b/entrypoint.py @@ -4,7 +4,7 @@ import os import sys import syndicate -from syndicate.utils import action_log, action_setoutput, job_getoutput, job_setoutput, get_posts, fronted, mark_syndicated_posts +from syndicate.utils import action_log, action_setoutput, job_getoutput, job_addoutput, get_posts, fronted, mark_syndicated_posts action_inputs = { 'silos': os.getenv('INPUT_SILOS').splitlines(), @@ -35,7 +35,7 @@ syndicated_posts = syndicate.elsewhere(posts, action_inputs['silos']) or {} action_setoutput('syndicated_posts', syndicated_posts) # Merge output with output of any previous runs -job_setoutput(syndicated_posts) +job_addoutput(syndicated_posts) if action_inputs['mark_as_syndicated']: action_log("Marking newly syndicated posts...") diff --git a/syndicate/utils.py b/syndicate/utils.py index e9654c3..1c9e4e9 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -5,20 +5,28 @@ import os import requests -### Github Action utilities ### def action_log(msg): + """(SIDE-EFFECT) Prints `msg` to the Github workflow log.""" print(msg) def action_debug(msg): + """(SIDE-EFFECT) Prints `msg` to the Github workflow debug log.""" print(f"::debug::{msg}") def action_warn(msg): + """(SIDE-EFFECT) Prints `msg` to the Github workflow warning log.""" print(f"::warning::{msg}") def action_error(msg): + """(SIDE-EFFECT) Prints `msg` to the Github workflow error log.""" print(f"::error::{msg}") def 
action_log_group(title): + """ + Decorates a function such that all its generated log statements are grouped + in the Github workflow log under `title`. + """ + def _decorator(func): def _wrapper(*args, **kwargs): print(f"::group::{title}") @@ -29,53 +37,97 @@ def _wrapper(*args, **kwargs): return _decorator def action_setenv(key, value): + """ + (SIDE-EFFECT) Sets an environment variable of the running Github workflow job. + """ print(f"::set-env name={key}::{value}") def action_setoutput(key, value): + """(SIDE-EFFECT) Sets an output variable of the running Github workflow step.""" print(f"::set-output name={key}::{value}") -def job_setoutput(results): - # Compile results for future steps - syndicated_posts = results - if 'SYNDICATE_POSTS' in os.environ: - syndicated_posts = job_getoutput() - syndicated_posts.update(results) +def job_addoutput(results): + """ + (SIDE-EFFECT) Persist `results` for future steps in the running Github + workflow job. + """ + syndicated_posts = job_getoutput() + syndicated_posts.update(results) action_setenv('SYNDICATE_POSTS', json.dumps(syndicated_posts)) def job_getoutput(): + """Returns the persisted results of the running Github workflow job.""" + # Default to an empty dictionary if no results have yet been persisted. return json.loads(os.getenv('SYNDICATE_POSTS', '{}')) # Memoize authentication and repo fetching. @functools.lru_cache(maxsize=1) def repo(): + """ + (MEMOIZED) Returns an authenticated reference to a `github3` repository + object for the repository this Github action is running in. 
+ @see https://github3.readthedocs.io/en/master/api-reference/repos.html#github3.repos.repo.Repository + """ assert os.getenv("GITHUB_TOKEN"), "missing GITHUB_TOKEN" assert os.getenv("GITHUB_REPOSITORY"), "missing GITHUB_REPOSITORY" gh = github3.login(token=os.getenv("GITHUB_TOKEN")) return gh.repository(*os.getenv("GITHUB_REPOSITORY").split('/')) -## NOTE -## Our action may generate a new commit, so we need to be sure we're always -## using the proper SHA. -def target_sha(): +def parent_sha(): + """ + Returns the git SHA to use as parent for any commits generated by this + Github workflow step. + """ assert os.getenv("GITHUB_SHA"), "missing GITHUB_SHA" return os.getenv('SYNDICATE_SHA', os.getenv("GITHUB_SHA")) -def get_commit_payload(): - return repo().commit(target_sha()).files +def get_trigger_payload(): + """ + Returns a list of dictionaries describing each of the modified files in the + commit that triggered this Github workflow. + @see https://github3.readthedocs.io/en/master/api-reference/repos.html#github3.repos.comparison.Comparison.files + """ + assert os.getenv("GITHUB_SHA"), "missing GITHUB_SHA" + # NOTE + # Explicitly using GITHUB_SHA to ensure we always have access to the changed + # files even if other steps generate commits. + return repo().commit(os.getenv("GITHUB_SHA")).files def file_contents(filename): - return repo().file_contents(filename, target_sha()) + """ + Returns the `github3` `Contents` object of the matching `filename` in latest + known commit to this repo. + @see https://github3.readthedocs.io/en/master/api-reference/repos.html#github3.repos.contents.Contents + @see :func:`~syndicate.utils.parent_sha` + """ + # NOTE + # Using the latest known commit to ensure we capture any modifications made + # to the post frontmatter by previous actions. 
+ return repo().file_contents(filename, parent_sha()) def get_posts(post_dir=os.getenv('SYNDICATE_POST_DIR', 'posts')): - files = get_commit_payload() + """ + Returns the latest known :func:`~syndicate.utils.file_contents` of the files + added and modified in the commit that triggered this Github workflow. + """ + files = get_trigger_payload() assert files, "target commit was empty" posts = [file for file in files if file['filename'].startswith(post_dir)] - # Ignore deleted files. - return [file_contents(post['filename']) for post in posts if post['status'] != 'deleted'] + return [ + file_contents(post['filename']) + for post in posts + if post['status'] != 'deleted' # ignore deleted files + ] def fronted(post): + """ + Returns the :py:class:`frontmatter.Post` representation of the given + :func:`~syndicate.utils.file_contents` object. + + If `post` is actually already a `frontmatter.Post`, this is a no-op. + """ assert post, "missing post" if type(post) == frontmatter.Post: return post @@ -83,14 +135,33 @@ def fronted(post): return frontmatter.loads(raw_contents) def syndicate_key_for(silo): + """ + Returns a formatted string used to identify a syndicate ID in post + frontmatter. + """ return f'{silo.lower()}_syndicate_id' def syndicate_id_for(post, silo): + """ + Retrieves the appropriate post ID for `silo` from the frontmatter of the + given `post`; returns None if no relevant ID exists. + """ assert post, "missing post" assert silo, "missing silo" return fronted(post).get(syndicate_key_for(silo)) def mark_syndicated_posts(syndicate_ids_by_path, fronted_posts_by_path): + """ + Injects the given syndicate IDs for the given posts into their frontmatter + and commits the updated posts back to this repo. + + If a syndicate ID already exists in a given post, it is left untouched. + + Returns a dictionary which is the response of the commit request. 
+ """ + assert syndicate_ids_by_path, "missing syndicate IDs" + assert fronted_posts_by_path, "missing fronted posts" + updated_fronted_posts_by_path = {} silos_included = set() for (path, syndicate_ids_by_silo) in syndicate_ids_by_path.items(): @@ -116,26 +187,27 @@ def mark_syndicated_posts(syndicate_ids_by_path, fronted_posts_by_path): # Create new fronted post with old frontmatter merged with syndicate IDs. updated_post = frontmatter.Post(**dict(fronted_post.to_dict(), **new_syndicate_ids)) updated_fronted_posts_by_path[path] = updated_post - return commit_post_changes(updated_fronted_posts_by_path, silos_included) - -## NOTE -# Following the recipe outlined here for creating a commit consisting of -# multiple file updates: -# https://developer.github.com/v3/git/ -# -# 1. Get the current commit object -# 2. Retrieve the tree it points to -# 3. Retrieve the content of the blob object that tree has for that -# particular file path -# 4. Change the content somehow and post a new blob object with that new -# content, getting a blob SHA back -# 5. Post a new tree object with that file path pointer replaced with your -# new blob SHA getting a tree SHA back -# 6. Create a new commit object with the current commit SHA as the parent -# and the new tree SHA, getting a commit SHA back -# 7. Update the reference of your branch to point to the new commit SHA -## -def commit_post_changes(fronted_posts_by_path, silos): + return commit_updated_posts(updated_fronted_posts_by_path, silos_included) + +def commit_updated_posts(fronted_posts_by_path, silos): + """ + Returns the response of committing the (presumably changed) given posts to + the remote GITHUB_REF of this repo by following the recipe outlined here: + + https://developer.github.com/v3/git/ + + 1. Get the current commit object + 2. Retrieve the tree it points to + 3. Retrieve the content of the blob object that tree has for that + particular file path + 4. 
Change the content somehow and post a new blob object with that new + content, getting a blob SHA back + 5. Post a new tree object with that file path pointer replaced with your + new blob SHA getting a tree SHA back + 6. Create a new commit object with the current commit SHA as the parent + and the new tree SHA, getting a commit SHA back + 7. Update the reference of your branch to point to the new commit SHA + """ if not fronted_posts_by_path: action_log("All good: already marked.") return None @@ -148,7 +220,7 @@ def commit_post_changes(fronted_posts_by_path, silos): path:repo().create_blob(frontmatter.dumps(fronted_post), 'utf-8') for (path, fronted_post) in fronted_posts_by_path.items() } - parent_sha = target_sha() + parent_sha = parent_sha() # Create a new tree with our updated blobs. new_tree = repo().create_tree( [ From 9c58c3ed83c0df8a101fc62760f916b80e281fe6 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Tue, 4 Feb 2020 12:07:17 +0000 Subject: [PATCH 086/105] Document DEV.to adapter --- syndicate/silos/dev.py | 36 +++++++++++++++++++++++++++++++++--- syndicate/utils.py | 1 + tests/test_dev.py | 16 ++++++++-------- 3 files changed, 42 insertions(+), 11 deletions(-) diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index be31335..2d3fcc6 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -5,9 +5,23 @@ SILO_NAME = 'DEV' @action_log_group(SILO_NAME) def syndicate(posts, api_key): + """ + Syndicates the given posts to https://dev.to, updating the ones that + already exist there and creating articles for the ones that don't. + + By default, articles are created in a "draft"/unpublished state, but this + can be overridden by individual posts by specifying `published: true` in + their frontmatter, if you prefer a "just do it" approach. 
+ + This uses the DEV API, which is currently in beta: https://docs.dev.to/api + + The required API key can be generated for your account by following the steps + outlined here: https://docs.dev.to/api/#section/Authentication + """ + action_log(f"Hello? Yes, this is {SILO_NAME}.") results = { - 'added': {post.path:_draft(post, api_key) for post in posts if not syndicate_id_for(post, SILO_NAME)}, + 'added': {post.path:_create(post, api_key) for post in posts if not syndicate_id_for(post, SILO_NAME)}, 'modified': {post.path:_update(post, api_key) for post in posts if syndicate_id_for(post, SILO_NAME)} } action_log("The results are in:") @@ -16,9 +30,17 @@ def syndicate(posts, api_key): ### privates ### -## This is a simple semantic wrapper around the DEV API, currently in beta. +def _create(post, api_key=None): + """ + Creates a new article for the given post on DEV.to and returns the results + of the POST request as a dictionary. -def _draft(post, api_key=None): + This tries to create an **unpublished** draft. However, the 'published' + status can be overridden in the frontmatter of the post itself for a + "just do it" approach. + + @see https://docs.dev.to/api/#operation/createArticle + """ assert api_key, "missing API key" assert post, "missing post" assert fronted(post).get('title'), "article is missing a title" @@ -43,6 +65,14 @@ def _draft(post, api_key=None): return results['id'] def _update(post, api_key=None): + """ + Updates an article corresponding to the given post on DEV.to and returns the + results of the PUT request as a dictionary. + + If a corresponding article does not exist, this will fail. 
+ + @see https://docs.dev.to/api/#operation/updateArticle + """ assert api_key, "missing API key" assert post, "missing post" diff --git a/syndicate/utils.py b/syndicate/utils.py index 1c9e4e9..ebd9d0c 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -28,6 +28,7 @@ def action_log_group(title): """ def _decorator(func): + @functools.wraps(func) def _wrapper(*args, **kwargs): print(f"::group::{title}") result = func(*args, **kwargs) diff --git a/tests/test_dev.py b/tests/test_dev.py index 99ad26d..f64bcba 100644 --- a/tests/test_dev.py +++ b/tests/test_dev.py @@ -5,29 +5,29 @@ import requests import requests_mock -def test_draft_error_when_api_key_missing(): +def test_create_error_when_api_key_missing(): with pytest.raises(AssertionError): - dev._draft(MockPost()) + dev._create(MockPost()) -def test_draft_error_when_post_missing(): +def test_create_error_when_post_missing(): with pytest.raises(AssertionError): - dev._draft(None) + dev._create(None) -def test_draft_returns_nothing_when_request_fails(requests_mock, monkeypatch): +def test_create_returns_nothing_when_request_fails(requests_mock, monkeypatch): monkeypatch.setenv('GITHUB_REPOSITORY', 'herp/derp') requests_mock.post( "https://dev.to/api/articles", status_code=requests.codes.unprocessable_entity, json={"error": "you made a unintelligble request"}) - assert not dev._draft(MockPost(), api_key='fake_api_key') + assert not dev._create(MockPost(), api_key='fake_api_key') -def test_draft_returns_something_on_success(requests_mock, monkeypatch): +def test_create_returns_something_on_success(requests_mock, monkeypatch): monkeypatch.setenv('GITHUB_REPOSITORY', 'herp/derp') requests_mock.post( "https://dev.to/api/articles", status_code=requests.codes.created, json={ 'type_of': 'article', 'id': 42 }) - assert dev._draft(MockPost(), api_key='fake_api_key') + assert dev._create(MockPost(), api_key='fake_api_key') def test_update_error_when_api_key_missing(): with pytest.raises(AssertionError): From 
8dfca808b605282289f16dba820a3f90ab798aeb Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Tue, 4 Feb 2020 12:23:40 +0000 Subject: [PATCH 087/105] Document top-level `elsewhere` function --- syndicate/__init__.py | 50 +++++++++++++++++++++++++++++++++++++------ 1 file changed, 43 insertions(+), 7 deletions(-) diff --git a/syndicate/__init__.py b/syndicate/__init__.py index f2cd46c..885d537 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -6,12 +6,44 @@ import sys def elsewhere(posts, silos): + """ + Syndicates the given posts to the given silos and returns a dictionary of + the results keyed by the silo that generated them. + + If a silo has no defined adapter, it is ignored. + If a silo has no defined API key, it is ignored. + + Result dictionary is formatted like so: + + { + : { + 'added': { + : , + ... + }, + 'modified': { + : , + ... + } + }, + ... + } + + Since not all silos may be in sync, the 'added' posts of one silo may be + merely 'modified' by another, and vice versa. + + Where possible, silo adapters should only create posts in a 'draft' or + unpublished status, to allow time for review and any platform-specific + changes to be made by the author. + """ + if not posts: + action_log("No posts to syndicate, nothing to do.") + return None if not silos: - action_log('No silos specified, nothing to see here.') + action_log('No silos specified, nothing to do.') return None - # De-dupe. 
- silos = list(set(silos)) + silos = list(set(silos)) # de-dupe the given list of silos action_log(f"You want to publish to these places: {silos}") specs = {silo:_locate(silo) for silo in silos if _locate(silo)} @@ -39,13 +71,17 @@ def elsewhere(posts, silos): @functools.lru_cache(maxsize=10) def _locate(silo): + """Locates the given silo adapter and returns its Python module name if found.""" + assert silo, 'missing silo' return getattr(importlib.util.find_spec(f'syndicate.silos.{silo.lower()}'), 'name', None) def _syndicate(silo_spec, api_key, posts): - if silo_spec and api_key: - return importlib.import_module(silo_spec).syndicate(posts, api_key) - else: - return None + """Loads and invokes the entrypoint of the given silo adaptor, returning the results.""" + assert silo_spec, 'missing silo spec' + assert api_key, 'missing API key' + return importlib.import_module(silo_spec).syndicate(posts, api_key) def _get_api_key(silo): + """Returns the API key for the given silo, as defined in the environment.""" + assert silo, 'missing silo' return os.getenv(_API_KEY(silo)) From 0298d5565f195f45dff8f5e417d9c2a32c15bf1f Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Tue, 4 Feb 2020 12:32:59 +0000 Subject: [PATCH 088/105] Document test mock: MockPost --- tests/mocks.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/mocks.py b/tests/mocks.py index a0eb2d4..beb989d 100644 --- a/tests/mocks.py +++ b/tests/mocks.py @@ -1,9 +1,11 @@ import frontmatter import textwrap -# A light-weight, as-needed mock of github3.repos.contents.Contents -# @see https://github3.readthedocs.io/en/master/api-reference/repos.html#github3.repos.contents.Contents class MockPost: + """ + A light-weight mock of a post object. 
+ @see https://github3.readthedocs.io/en/master/api-reference/repos.html#github3.repos.contents.Contents + """ def __init__(self): self.raw_contents = textwrap.dedent( """ From 2d62ee22e221e555e6a46897d8c9d9922ebd6c2c Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Tue, 4 Feb 2020 22:40:30 +0000 Subject: [PATCH 089/105] Lowercase action name --- action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/action.yml b/action.yml index 6371ab1..002d5e6 100644 --- a/action.yml +++ b/action.yml @@ -1,4 +1,4 @@ -name: 'Syndicate' +name: 'syndicate' description: 'Publish your content elsewhere (P.O.S.S.E)' branding: icon: rss From 7228d9f3369e93c19dd0f2ebc812648012b17ef0 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Tue, 4 Feb 2020 22:57:11 +0000 Subject: [PATCH 090/105] Remove unused code from test mock --- tests/mocks.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/mocks.py b/tests/mocks.py index beb989d..017a964 100644 --- a/tests/mocks.py +++ b/tests/mocks.py @@ -16,7 +16,5 @@ def __init__(self): --- What is a body? 
""").strip() - self.front, _ = frontmatter.parse(self.raw_contents) self.decoded = self.raw_contents.encode('utf-8') self.name = 'a-beautiful-mock.md' - self.html_url = f'https://silo.com/{self.name}' From d609b3cf0d5698bcecfe5a98c32e7d96d90c6995 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Tue, 4 Feb 2020 22:57:25 +0000 Subject: [PATCH 091/105] Unify terminology: 'syndicate ID' -> 'silo ID' --- action.yml | 2 +- entrypoint.py | 16 +++++++------- syndicate/silos/dev.py | 14 ++++++------ syndicate/utils.py | 48 ++++++++++++++++++++---------------------- tests/mocks.py | 2 +- tests/test_dev.py | 6 +++--- 6 files changed, 43 insertions(+), 45 deletions(-) diff --git a/action.yml b/action.yml index 002d5e6..79a8f11 100644 --- a/action.yml +++ b/action.yml @@ -14,7 +14,7 @@ inputs: required: false default: '' mark_as_syndicated: - description: 'Commit syndicate IDs to the frontmatter of newly syndicated posts for synchronization purposes' + description: 'Commit silo IDs to the frontmatter of newly syndicated posts for synchronization purposes' required: false default: false diff --git a/entrypoint.py b/entrypoint.py index f8ca514..7db0fc2 100755 --- a/entrypoint.py +++ b/entrypoint.py @@ -17,16 +17,16 @@ action_setoutput("time", datetime.now()) sys.exit() -# Syndicate +# Do the thing. # Result set format: # { # '': { # 'added': { -# 'path/to/new_post': , +# 'path/to/new_post': , # ... # }, # 'modified': { -# 'path/to/updated_post': , +# 'path/to/updated_post': , # ... # } # }, @@ -68,14 +68,14 @@ # }, # ... 
# } - syndicate_ids_by_path = {} + silo_ids_by_path = {} for (silo, indexed_paths) in indexed_paths_by_silo.items(): - for (path, id) in indexed_paths.items(): - syndicate_ids_by_path.setdefault(path, {}) - syndicate_ids_by_path[path][silo] = id + for (path, sid) in indexed_paths.items(): + silo_ids_by_path.setdefault(path, {}) + silo_ids_by_path[path][silo] = sid mark_syndicated_posts( - syndicate_ids_by_path, + silo_ids_by_path, {post.path:fronted(post) for post in posts} ) diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 2d3fcc6..bfe2eea 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -1,4 +1,4 @@ -from syndicate.utils import action_log_group, action_log, action_error, fronted, syndicate_id_for +from syndicate.utils import action_log_group, action_log, action_error, fronted, silo_id_for import requests import pprint @@ -21,8 +21,8 @@ def syndicate(posts, api_key): action_log(f"Hello? Yes, this is {SILO_NAME}.") results = { - 'added': {post.path:_create(post, api_key) for post in posts if not syndicate_id_for(post, SILO_NAME)}, - 'modified': {post.path:_update(post, api_key) for post in posts if syndicate_id_for(post, SILO_NAME)} + 'added': {post.path:_create(post, api_key) for post in posts if not silo_id_for(post, SILO_NAME)}, + 'modified': {post.path:_update(post, api_key) for post in posts if silo_id_for(post, SILO_NAME)} } action_log("The results are in:") action_log(pprint.pformat(results)) @@ -32,8 +32,8 @@ def syndicate(posts, api_key): def _create(post, api_key=None): """ - Creates a new article for the given post on DEV.to and returns the results - of the POST request as a dictionary. + Creates a new article for the given post on DEV.to and returns the silo ID of + the newly created article. This tries to create an **unpublished** draft. 
However, the 'published' status can be overridden in the frontmatter of the post itself for a @@ -67,7 +67,7 @@ def _create(post, api_key=None): def _update(post, api_key=None): """ Updates an article corresponding to the given post on DEV.to and returns the - results of the PUT request as a dictionary. + silo ID of the updated arcticle. If a corresponding article does not exist, this will fail. @@ -76,7 +76,7 @@ def _update(post, api_key=None): assert api_key, "missing API key" assert post, "missing post" - endpoint = f'https://dev.to/api/articles/{syndicate_id_for(post, SILO_NAME)}' + endpoint = f'https://dev.to/api/articles/{silo_id_for(post, SILO_NAME)}' headers = {'api-key': api_key} payload = {'article': { 'body_markdown': post.decoded.decode('utf-8') } } response = requests.put(endpoint, headers=headers, json=payload) diff --git a/syndicate/utils.py b/syndicate/utils.py index ebd9d0c..ec01642 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -135,58 +135,56 @@ def fronted(post): raw_contents = post.decoded.decode('utf-8') return frontmatter.loads(raw_contents) -def syndicate_key_for(silo): - """ - Returns a formatted string used to identify a syndicate ID in post - frontmatter. - """ - return f'{silo.lower()}_syndicate_id' +def silo_key_for(silo): + """Returns a formatted string used to identify a silo ID in post frontmatter.""" + return f'{silo.lower()}_silo_id' -def syndicate_id_for(post, silo): +def silo_id_for(post, silo): """ - Retrieves the appropriate post ID for `silo` from the frontmatter of the - given `post`; returns None if no relevant ID exists. + Retrieves the ID appropriate for `silo` from the frontmatter of the given + `post`; returns None if no relevant ID exists. 
""" assert post, "missing post" assert silo, "missing silo" - return fronted(post).get(syndicate_key_for(silo)) + return fronted(post).get(silo_key_for(silo)) -def mark_syndicated_posts(syndicate_ids_by_path, fronted_posts_by_path): +def mark_syndicated_posts(silo_ids_by_path, fronted_posts_by_path): """ - Injects the given syndicate IDs for the given posts into their frontmatter + Injects the given silo IDs for the given posts into their frontmatter and commits the updated posts back to this repo. - If a syndicate ID already exists in a given post, it is left untouched. + If a silo ID already exists in a given post, that's fine: we assume IDs don't + change, and so we don't try to change them. Returns a dictionary which is the response of the commit request. """ - assert syndicate_ids_by_path, "missing syndicate IDs" + assert silo_ids_by_path, "missing silo IDs" assert fronted_posts_by_path, "missing fronted posts" updated_fronted_posts_by_path = {} silos_included = set() - for (path, syndicate_ids_by_silo) in syndicate_ids_by_path.items(): + for (path, silo_ids_by_silo) in silo_ids_by_path.items(): fronted_post = fronted_posts_by_path[path] # Format: # { - # 'silo_a_syndicate_id': 42, - # 'silo_b_syndicate_id': 'abc123', + # 'dev_silo_id': 42, + # 'medium_silo_id': 'abc123', # ... # } - new_syndicate_ids = {} - for (silo, sid) in syndicate_ids_by_silo.items(): - # Ignore already posts already marked with this silo - if not syndicate_id_for(fronted_post, silo): - new_syndicate_ids[syndicate_key_for(silo)] = sid + new_silo_ids = {} + for (silo, sid) in silo_ids_by_silo.items(): + # Ignore already posts marked with this silo + if not silo_id_for(fronted_post, silo): + new_silo_ids[silo_key_for(silo)] = sid silos_included.add(silo) # Only add to commit if there're any new IDs to add. - if not new_syndicate_ids: + if not new_silo_ids: continue - # Create new fronted post with old frontmatter merged with syndicate IDs. 
- updated_post = frontmatter.Post(**dict(fronted_post.to_dict(), **new_syndicate_ids)) + # Create new fronted post with old frontmatter merged with silo IDs. + updated_post = frontmatter.Post(**dict(fronted_post.to_dict(), **new_silo_ids)) updated_fronted_posts_by_path[path] = updated_post return commit_updated_posts(updated_fronted_posts_by_path, silos_included) diff --git a/tests/mocks.py b/tests/mocks.py index 017a964..21d1f72 100644 --- a/tests/mocks.py +++ b/tests/mocks.py @@ -10,7 +10,7 @@ def __init__(self): self.raw_contents = textwrap.dedent( """ --- - dev_syndicate_id: 42 + dev_silo_id: 42 title: A beautiful mock tags: beauty, fake --- diff --git a/tests/test_dev.py b/tests/test_dev.py index f64bcba..e0ee36f 100644 --- a/tests/test_dev.py +++ b/tests/test_dev.py @@ -1,4 +1,4 @@ -from syndicate.utils import syndicate_id_for +from syndicate.utils import silo_id_for from syndicate.silos import dev from .mocks import MockPost import pytest @@ -41,7 +41,7 @@ def test_update_returns_nothing_when_request_fails(requests_mock, monkeypatch): monkeypatch.setenv('GITHUB_REPOSITORY', 'herp/derp') mock = MockPost() requests_mock.put( - f"https://dev.to/api/articles/{syndicate_id_for(mock, dev.SILO_NAME)}", + f"https://dev.to/api/articles/{silo_id_for(mock, dev.SILO_NAME)}", status_code=requests.codes.unprocessable_entity, json={"error": "you made an unintelligble request"}) assert not dev._update(mock, api_key='fake_api_key') @@ -49,7 +49,7 @@ def test_update_returns_nothing_when_request_fails(requests_mock, monkeypatch): def test_update_returns_something_on_success(requests_mock, monkeypatch): monkeypatch.setenv('GITHUB_REPOSITORY', 'herp/derp') mock = MockPost() - mock_id= syndicate_id_for(mock, dev.SILO_NAME) + mock_id= silo_id_for(mock, dev.SILO_NAME) requests_mock.put( f"https://dev.to/api/articles/{mock_id}", status_code=requests.codes.ok, From 55f8dcfc4a376337b98a4a102f1e0ca68155a1b0 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Tue, 4 Feb 2020 23:04:45 
+0000 Subject: [PATCH 092/105] bugfix(utils): Fix reference error caused by shadow --- syndicate/utils.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/syndicate/utils.py b/syndicate/utils.py index ec01642..f8f662a 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -219,7 +219,7 @@ def commit_updated_posts(fronted_posts_by_path, silos): path:repo().create_blob(frontmatter.dumps(fronted_post), 'utf-8') for (path, fronted_post) in fronted_posts_by_path.items() } - parent_sha = parent_sha() + parent = parent_sha() # Create a new tree with our updated blobs. new_tree = repo().create_tree( [ @@ -231,7 +231,7 @@ def commit_updated_posts(fronted_posts_by_path, silos): } for (path, blob_sha) in new_blobs_by_path.items() ], - base_tree=parent_sha + base_tree=parent ) # Update the parent tree with our new subtree. @@ -241,7 +241,7 @@ def commit_updated_posts(fronted_posts_by_path, silos): new_commit = repo().create_commit( f'(syndicate): adding IDs for {silos}', new_tree.sha, - [parent_sha] + [parent] ) response = requests.put( f'https://api.github.com/repos/{os.getenv("GITHUB_REPOSITORY")}/git/{os.getenv("GITHUB_REF")}', From 706ef936f39ed1ffe8881dd8d8240618539e2e2f Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Tue, 4 Feb 2020 23:12:31 +0000 Subject: [PATCH 093/105] tweak log messages --- syndicate/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/syndicate/__init__.py b/syndicate/__init__.py index 885d537..560b8b2 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -37,10 +37,10 @@ def elsewhere(posts, silos): changes to be made by the author. 
""" if not posts: - action_log("No posts to syndicate, nothing to do.") + action_log("No posts to syndicate, nothing to syndicate.") return None if not silos: - action_log('No silos specified, nothing to do.') + action_log('No silos specified, nowhere to syndicate.') return None silos = list(set(silos)) # de-dupe the given list of silos From 5d8b8b9e43473bef6069ac56853523cea16dd8f0 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Tue, 4 Feb 2020 23:19:02 +0000 Subject: [PATCH 094/105] Skip marking when nothing added --- entrypoint.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/entrypoint.py b/entrypoint.py index 7db0fc2..c0f2fe6 100755 --- a/entrypoint.py +++ b/entrypoint.py @@ -53,10 +53,10 @@ indexed_paths_by_silo = { silo: results['added'] for (silo, results) in syndicated_posts.items() - if results + if results and 'added' in results } - if not indexed_paths_by_silo: + if not indexed_paths_by_silo or not any(indexed_paths_by_silo.values()): action_log("Nothing new to mark.") sys.exit() From 8c120c8ebe0984e02b983d118c7b2328ead83526 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Tue, 4 Feb 2020 23:27:55 +0000 Subject: [PATCH 095/105] Disable dev mode in example workflow --- .github/workflows/example.yml | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/example.yml b/.github/workflows/example.yml index 5efc214..cc23cc3 100644 --- a/.github/workflows/example.yml +++ b/.github/workflows/example.yml @@ -4,10 +4,9 @@ on: # Watch these branches... branches: - master - - develop # ...for changes to these files. - # paths: - # - 'pages/**/*.mdx?' + paths: + - 'pages/**/*.mdx?' 
jobs: syndicate: From d766586ee9db2e3016a0129045ebba66a0bbafc8 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Wed, 5 Feb 2020 11:47:09 +0000 Subject: [PATCH 096/105] Add tests for 'syndicate.elsewhere' --- tests/test_syndicate.py | 45 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 tests/test_syndicate.py diff --git a/tests/test_syndicate.py b/tests/test_syndicate.py new file mode 100644 index 0000000..2deffd5 --- /dev/null +++ b/tests/test_syndicate.py @@ -0,0 +1,45 @@ +import importlib.util +import pytest +import syndicate + +@pytest.fixture(autouse=True) +def clear_silo_cache(): + """Needed to ensure our monkeypatching doesn't get cached between tests.""" + yield + syndicate._locate.cache_clear() + +def test_elsewhere_returns_none_when_given_no_posts(): + assert not syndicate.elsewhere([], ['Fake_Silo']) + +def test_elsewhere_returns_none_when_given_no_silos(): + assert not syndicate.elsewhere(['a post'], []) + +def test_elsewhere_returns_none_when_no_api_keys_exist_for_given_silos(monkeypatch): + fake_silo = 'Fake_Silo' + # Ensure we cannot use the fake silo adapter. + monkeypatch.delenv(syndicate._API_KEY(fake_silo), raising=False) + assert not syndicate.elsewhere(['a post'], [fake_silo]) + +def test_elsewhere_returns_none_when_no_adapter_exists_for_given_silos(monkeypatch): + fake_silo = 'Fake_Silo' + # Ensure we cannot find the fake silo adapter. + monkeypatch.setattr(importlib.util, 'find_spec', lambda s: None) + # Ensure we can use the fake silo adapter. + monkeypatch.setenv(syndicate._API_KEY(fake_silo), 'fake API key') + assert not syndicate.elsewhere(['a post'], [fake_silo]) + +def test_elsewhere_returns_syndication_results_for_recognized_silos_when_given_api_keys(monkeypatch): + class MockSpec: + def __init__(self): + self.name = 'mock_spec' + class MockSilo: + def syndicate(posts, api_key): + return 'mock results' + fake_silo = 'Fake_Silo' + # Ensure we can find the fake silo adapter. 
+ monkeypatch.setattr(importlib.util, 'find_spec', lambda s: MockSpec()) + # Ensure we can load the fake silo adapter. + monkeypatch.setattr(importlib, 'import_module', lambda s: MockSilo) + # Ensure we can use the fake silo adapter. + monkeypatch.setenv(syndicate._API_KEY(fake_silo), 'fake API key') + assert syndicate.elsewhere(['a post'], [fake_silo]) From 2fb9997658ef09d2548f2e9d322ae2807541266e Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Wed, 5 Feb 2020 20:56:19 +0000 Subject: [PATCH 097/105] Return silo URLs in addition to IDs --- entrypoint.py | 8 ++++---- syndicate/silos/dev.py | 10 +++++----- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/entrypoint.py b/entrypoint.py index c0f2fe6..d1d9ba4 100755 --- a/entrypoint.py +++ b/entrypoint.py @@ -22,13 +22,13 @@ # { # '': { # 'added': { -# 'path/to/new_post': , +# 'path/to/new_post': ( , ), # ... # }, # 'modified': { -# 'path/to/updated_post': , +# 'path/to/updated_post': ( , ), # ... -# } +# }, # }, # ... # } @@ -70,7 +70,7 @@ # } silo_ids_by_path = {} for (silo, indexed_paths) in indexed_paths_by_silo.items(): - for (path, sid) in indexed_paths.items(): + for (path, ( sid, _ )) in indexed_paths.items(): silo_ids_by_path.setdefault(path, {}) silo_ids_by_path[path][silo] = sid diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index bfe2eea..fdacdb9 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -32,8 +32,8 @@ def syndicate(posts, api_key): def _create(post, api_key=None): """ - Creates a new article for the given post on DEV.to and returns the silo ID of - the newly created article. + Creates a new article for the given post on DEV.to and returns the silo ID + and URL of the newly created article. This tries to create an **unpublished** draft. 
However, the 'published' status can be overridden in the frontmatter of the post itself for a @@ -62,12 +62,12 @@ def _create(post, api_key=None): return None else: results = response.json() - return results['id'] + return (results['id'], results['url']) def _update(post, api_key=None): """ Updates an article corresponding to the given post on DEV.to and returns the - silo ID of the updated arcticle. + silo ID and URL of the updated arcticle. If a corresponding article does not exist, this will fail. @@ -85,4 +85,4 @@ def _update(post, api_key=None): return None else: results = response.json() - return results['id'] + return (results['id'], results['url']) From c2cd6936bff4b5c80460084b09519bb1609c06df Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Fri, 7 Feb 2020 15:55:01 +0100 Subject: [PATCH 098/105] Switching from github3 to PyGithub --- Dockerfile | 2 +- requirements.txt | 2 +- syndicate/silos/dev.py | 4 +- syndicate/utils.py | 96 ++++++++++++++++++------------------------ tests/mocks.py | 4 +- tests/test_dev.py | 4 +- 6 files changed, 49 insertions(+), 63 deletions(-) diff --git a/Dockerfile b/Dockerfile index cd3756a..89405c7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,5 @@ # Would like to use python:3-alpine, but it doesn't have 'gcc' and the github3 library needs that. -FROM python:3 +FROM python:3-alpine WORKDIR /action diff --git a/requirements.txt b/requirements.txt index ce601f7..50baab3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,3 +1,3 @@ requests -github3.py +PyGithub python-frontmatter diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index fdacdb9..98f0692 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -50,7 +50,7 @@ def _create(post, api_key=None): # NOTE This can be overridden by explicitly setting 'published' in # the frontmatter. 
'published': False, - 'body_markdown': post.decoded.decode('utf-8') + 'body_markdown': post.decoded_content.decode('utf-8') } } endpoint = "https://dev.to/api/articles" @@ -78,7 +78,7 @@ def _update(post, api_key=None): endpoint = f'https://dev.to/api/articles/{silo_id_for(post, SILO_NAME)}' headers = {'api-key': api_key} - payload = {'article': { 'body_markdown': post.decoded.decode('utf-8') } } + payload = {'article': { 'body_markdown': post.decoded_content.decode('utf-8') } } response = requests.put(endpoint, headers=headers, json=payload) if response.status_code != requests.codes.ok: action_error(f"Failed to update post '{post.name}': {response.json()}") diff --git a/syndicate/utils.py b/syndicate/utils.py index f8f662a..e2d6f1e 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -1,6 +1,6 @@ import frontmatter import functools -import github3 +from github import Github, InputGitTreeElement import json import os import requests @@ -65,15 +65,15 @@ def job_getoutput(): @functools.lru_cache(maxsize=1) def repo(): """ - (MEMOIZED) Returns an authenticated reference to a `github3` repository - object for the repository this Github action is running in. - @see https://github3.readthedocs.io/en/master/api-reference/repos.html#github3.repos.repo.Repository + (MEMOIZED) Returns an authenticated reference to a repository object for the + repository this Github action is running in. 
+ @see https://pygithub.readthedocs.io/en/latest/github_objects/Repository.html#github.Repository.Repository """ assert os.getenv("GITHUB_TOKEN"), "missing GITHUB_TOKEN" assert os.getenv("GITHUB_REPOSITORY"), "missing GITHUB_REPOSITORY" - gh = github3.login(token=os.getenv("GITHUB_TOKEN")) - return gh.repository(*os.getenv("GITHUB_REPOSITORY").split('/')) + gh = Github(os.getenv("GITHUB_TOKEN")) + return gh.get_repo(os.getenv("GITHUB_REPOSITORY")) def parent_sha(): """ @@ -85,27 +85,27 @@ def parent_sha(): def get_trigger_payload(): """ - Returns a list of dictionaries describing each of the modified files in the - commit that triggered this Github workflow. - @see https://github3.readthedocs.io/en/master/api-reference/repos.html#github3.repos.comparison.Comparison.files + Returns a list of lightweight File objects describing each of the modified + files in the commit that triggered this Github workflow. + @see https://pygithub.readthedocs.io/en/latest/github_objects/File.html#github.File.File """ assert os.getenv("GITHUB_SHA"), "missing GITHUB_SHA" # NOTE # Explicitly using GITHUB_SHA to ensure we always have access to the changed # files even if other steps generate commits. - return repo().commit(os.getenv("GITHUB_SHA")).files + return repo().get_commit(os.getenv("GITHUB_SHA")).files -def file_contents(filename): +def file_contents(filepath): """ - Returns the `github3` `Contents` object of the matching `filename` in latest - known commit to this repo. - @see https://github3.readthedocs.io/en/master/api-reference/repos.html#github3.repos.contents.Contents + Returns a `ContentFile` object of the matching the given path in latest known + commit to this repo. + @see https://pygithub.readthedocs.io/en/latest/github_objects/ContentFile.html#github.ContentFile.ContentFile @see :func:`~syndicate.utils.parent_sha` """ # NOTE # Using the latest known commit to ensure we capture any modifications made # to the post frontmatter by previous actions. 
- return repo().file_contents(filename, parent_sha()) + return repo().get_contents(filepath, ref=parent_sha()) def get_posts(post_dir=os.getenv('SYNDICATE_POST_DIR', 'posts')): """ @@ -115,11 +115,11 @@ def get_posts(post_dir=os.getenv('SYNDICATE_POST_DIR', 'posts')): files = get_trigger_payload() assert files, "target commit was empty" - posts = [file for file in files if file['filename'].startswith(post_dir)] + posts = [file for file in files if file.filename.startswith(post_dir)] return [ - file_contents(post['filename']) + file_contents(post.filename) for post in posts - if post['status'] != 'deleted' # ignore deleted files + if post.status != 'deleted' # ignore deleted files ] def fronted(post): @@ -132,7 +132,7 @@ def fronted(post): assert post, "missing post" if type(post) == frontmatter.Post: return post - raw_contents = post.decoded.decode('utf-8') + raw_contents = post.decoded_content.decode('utf-8') return frontmatter.loads(raw_contents) def silo_key_for(silo): @@ -214,48 +214,34 @@ def commit_updated_posts(fronted_posts_by_path, silos): assert os.getenv("GITHUB_REPOSITORY"), "missing GITHUB_REPOSITORY" assert os.getenv("GITHUB_REF"), "missing GITHUB_REF" - # Create new blobs in the repo's Git database containing the updated contents of our posts. - new_blobs_by_path = { - path:repo().create_blob(frontmatter.dumps(fronted_post), 'utf-8') - for (path, fronted_post) in fronted_posts_by_path.items() - } parent = parent_sha() # Create a new tree with our updated blobs. 
- new_tree = repo().create_tree( + new_tree = repo().create_git_tree( [ - { - 'path': path, - 'mode': '100644', # 'file', @see https://developer.github.com/v3/git/trees/#tree-object - 'type': 'blob', - 'sha': blob_sha - } - for (path, blob_sha) in new_blobs_by_path.items() + InputGitTreeElement( + path, + mode='100644', # 'file', @see https://developer.github.com/v3/git/trees/#tree-object + type='blob', + content=frontmatter.dumps(fronted_post) + ) + for (path, fronted_post) in fronted_posts_by_path.items() ], - base_tree=parent + base_tree=repo().get_git_tree(parent) ) - # Update the parent tree with our new subtree. - # NOTE The github3 package I'm using apparently doesn't support updating refs -_- - # Hand-rolling my own using the Github API directly. - # @see https://developer.github.com/v3/ - new_commit = repo().create_commit( + # Commit the new tree. + new_commit = repo().create_git_commit( f'(syndicate): adding IDs for {silos}', - new_tree.sha, - [parent] + new_tree, + [repo().get_git_commit(parent)] ) - response = requests.put( - f'https://api.github.com/repos/{os.getenv("GITHUB_REPOSITORY")}/git/{os.getenv("GITHUB_REF")}', - headers={ - 'Authorization': f"token {os.getenv('GITHUB_TOKEN')}", - 'Accept': 'application/vnd.github.v3+json' - }, - json={'sha': new_commit.sha} - ) - if response.status_code == requests.codes.ok: - ## NOTE Need to update the reference SHA for future workflow steps. - action_setenv('SYNDICATE_SHA', new_commit.sha) - action_log("Syndicate posts marked.") - return response.json() - else: - action_error(f"Failed to mark syndicated posts: {response.json()}") + # Poosh it. + ref_name = os.getenv('GITHUB_REF').lstrip('refs/') + try: + repo().get_git_ref(ref_name).edit(new_commit.sha) + except github.GithubException as err: + action_error(f"Failed to mark syndicated posts: {err}") return None + ## NOTE Need to update the reference SHA for future workflow steps. 
+ action_setenv('SYNDICATE_SHA', new_commit.sha) + action_log("Syndicate posts marked.") diff --git a/tests/mocks.py b/tests/mocks.py index 21d1f72..136e47b 100644 --- a/tests/mocks.py +++ b/tests/mocks.py @@ -4,7 +4,7 @@ class MockPost: """ A light-weight mock of a post object. - @see https://github3.readthedocs.io/en/master/api-reference/repos.html#github3.repos.contents.Contents + @see https://pygithub.readthedocs.io/en/latest/github_objects/ContentFile.html#github.ContentFile.ContentFile """ def __init__(self): self.raw_contents = textwrap.dedent( @@ -16,5 +16,5 @@ def __init__(self): --- What is a body? """).strip() - self.decoded = self.raw_contents.encode('utf-8') + self.decoded_content = self.raw_contents.encode('utf-8') self.name = 'a-beautiful-mock.md' diff --git a/tests/test_dev.py b/tests/test_dev.py index e0ee36f..21cec36 100644 --- a/tests/test_dev.py +++ b/tests/test_dev.py @@ -26,7 +26,7 @@ def test_create_returns_something_on_success(requests_mock, monkeypatch): requests_mock.post( "https://dev.to/api/articles", status_code=requests.codes.created, - json={ 'type_of': 'article', 'id': 42 }) + json={ 'type_of': 'article', 'id': 42, 'url': 'https://fake.url/for-this-post' }) assert dev._create(MockPost(), api_key='fake_api_key') def test_update_error_when_api_key_missing(): @@ -53,5 +53,5 @@ def test_update_returns_something_on_success(requests_mock, monkeypatch): requests_mock.put( f"https://dev.to/api/articles/{mock_id}", status_code=requests.codes.ok, - json={'type_of': 'article', 'id': mock_id}) + json={'type_of': 'article', 'id': mock_id, 'url': 'https://fake.url/for-this-post'}) assert dev._update(mock, api_key='fake_api_key') From e1702b46a7aa71ae43ad4bd661917b77eaace2d0 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Fri, 7 Feb 2020 19:00:51 +0100 Subject: [PATCH 099/105] Write the README --- README.md | 183 +++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 182 insertions(+), 1 deletion(-) diff --git a/README.md 
b/README.md index 1123750..9a0d9e0 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,183 @@ # syndicate -a simple implementation of the [P.O.S.S.E.](https://indieweb.org/POSSE) content publishing model + +A simple implementation of the [P.O.S.S.E.](https://indieweb.org/POSSE) content publishing model. + +Write your content, store it on Github, and use this action in a workflow to draft it to silo platforms like [DEV.to](https://dev.to). The action will keep the silos up to date with your latest changes here on Github. + +Wherever possible, when content is syndicated to a silo for the first time, it is created in an unpublished/"draft" form. Any exceptions to this will be called out in the documentation for [`silos`](#silos) below. + +## Example usage + +See [the example workflow](https://github.com/dabrady/syndicate/blob/develop/.github/workflows/example.yml) for a fully annotated example, but here's the quick version: + +```yaml +uses: dabrady/syndicate@v1.0 +env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + DEV_API_KEY: ${{ secrets.DEV_API_KEY }} + SYNDICATE_POSTS_DIR: pages/posts +with: + silos: DEV + mark_as_syndicated: true +``` + +## Be aware: Github is the source of truth +Syndication is a one-way street: changes made to your content on Github will be copied to your silos, but changes made to a copy of the content on a particular silo will not be synced to your Github repository. + +**Github is the source of truth**: any changes made on specific platforms will be overwritten with whatever is in Github the next time this action processes a change to that content. + +This can have undesirable effects. Not all platforms support the same writing systems, and you might often find yourself needing to tweak your content on a particular silo before you publish it; but if you then make an update to it on Github, those silo-specific tweaks will be wiped away and you'll have to do it again. 
+ +For this reason, by default this action treats your content as immutable, and creates a new draft in the specified silos for every commit you make to a particular file. This prevents overwriting existing published content with content that is unsuitable for that platform. + +This comes with its own set of drawbacks and annoyances, however, so it is possible to simply manifest new content as new drafts, and push updates to existing content directly to their existing syndicated counterparts. + +## Inputs + +### `silos` + +_Default: `none`_ + +A YAML list of platforms to syndicate your content to. Silo names are case insensitive but should be snake_cased if they contain spaces. +E.g. + +```yaml +with: + silos: | + DEV + Medium + CNN + BBC +``` + +If a given silo is unsupported, it will be ignored and called out in the action log. + +The current supported silos are: +- `DEV` (https://dev.to) + +### `mark_as_syndicated` + +_Default: `false`_ + +A flag used to trigger a commit upstream which adds the silo-specific IDs to the YAML frontmatter of your syndicated content. This ensures that any subsequent changes you make to your posts will trigger an update to the syndicated copy, instead of triggering the creation of a new draft on your silos. 
+ +For instance, if the commit that triggered this workflow added a new post called `pages/posts/i-got-a-new-cat.md`, a step in your workflow configured like this: + +```yaml +steps: +- name: Push to DEV.to and sync IDs + uses: dabrady/syndicate@v1.0 + with: + silos: DEV + mark_as_syndicated: true +``` + +will create a new draft on DEV.to with a copy of `pages/posts/i-got-a-new-cat.md` and result in a commit to the upstream head of the branch that triggered this workflow that looks like this: + +```diff +diff --git a/pages/posts/i-got-a-new-cat.md b/pages/posts/i-got-a-new-cat.md +index e94caa8..cc23cc3 100644 +--- a/pages/posts/i-got-a-new-cat.md ++++ b/pages/posts/i-got-a-new-cat.md +@@ -2,3 +2,4 @@ on: +--- ++dev_silo_id: 5316572 +title: I got a new cat! +--- +``` + +Providing no silos, but asking to mark new posts as syndicated, will ensure any posts added to a silo **by previous steps** are properly marked before the job completes. Think of it like a save point: this approach to using the flag allows you to bundle silo syndication into as many or as few commits as you wish: + +```yaml +steps: +... +- name: Save unsaved silo IDs to Github + uses: dabrady/syndicate@v1.0 + with: + mark_as_syndicated: true +``` + +### Environment variables + +#### Required + +##### `GITHUB_TOKEN` + +In order to syndicate your content, this action needs access to your content. + +A unique `GITHUB_TOKEN` secret is created by Github for every workflow run for use by actions to access the repository, and needs to be added to the environment of this action in your workflow setup. E.g. +```yaml +steps: +- name: Push to DEV.to and sync IDs + uses: dabrady/syndicate@v1.0 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + silos: DEV + mark_as_syndicated: true +``` + +See the [Github Workflow documentation](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/authenticating-with-the-github_token) for full details. 
+ +##### `_API_KEY` + +It is assumed that this action will need to interact with every supported silo via a public API, and that that API authenticates via a personal API key. + +Thus, this action will ignore any silos specified unless a corresponding API key is exposed in its environment. The keys are expected to be found in environment variables matching the following format: + +``` +_API_KEY +``` + +where `` is a SCREAMING\_SNAKE\_CASE version of a recognized argument for the `silos` action input. For example, the API key for the `DEV` silo should be exposed as the `DEV_API_KEY` environment variable. + +For details on how to expose these secrets to the action without exposing them to the world, see the [Github documentation on working with secrets](https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets). + +#### Optional + +##### `SYNDICATE_POST_DIR` + +_Default: `posts`_ + +Naturally, not all commits to your repo may contain a change to the content you want to syndicate. + +The simplistic approach currently implemented for identifying the proper files is to look for file paths with a given prefix/in a particular directory of your repo. Set this environment variable to the place in your repo (relative to the root) where you keep the stuff you want to share elsewhere. + +(The choice to use an environment variable for this instead of an input is so that you can set it once in your workflow and not have to specify it on every use of this action, should you choose to use it multiple times in a given workflow.) + +## Outputs + +### `time` + +A timestamp marking the end of the action. + +### `syndicated_posts` + +A JSON-formatted string of posts that were added/modified on each of the `silos` specified, including the unique identifiers given to them by the silo and the public URL of the added/modified post. +E.g.
+```json +{ + "DEV": { + "added": { + "pages/posts/i-got-a-new-cat.md": [ 201054451, "https://dev.to/daniel13rady/i-got-a-new-cat-aej2-temp-slug-0246" ] + }, + "modified": {} + }, + "Medium": { ... }, + ... +} +``` + +#### Environment variables + +##### `SYNDICATE_SHA` +:warning: Internal, do not set this yourself. + +Using the `mark_as_syndicated` flag will cause a commit to be generated and pushed to the upstream of the branch that triggered the workflow. The generated commit SHA is stored in this variable for use as the parent of any commits generated by later steps and considered to be the 'head' of the branch when present. + +##### `SYNDICATE_POSTS` +:warning: Internal, do not set this yourself. + +**NOTE** The word is 'syndicate', not ~~'syndicate**d**'~~. It is a prefix used by convention on all environment variables set by this action. + +A JSON string formatted identically to the `syndicated_posts` action output, but containing the composite results of all invocations of this action so far in the running workflow. From edc91a3a485e81f789f183b7cee96f29a571c1fd Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sat, 8 Feb 2020 00:28:28 +0100 Subject: [PATCH 100/105] Remove old comment from Dockerfile --- Dockerfile | 1 - 1 file changed, 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 89405c7..4512444 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,3 @@ -# Would like to use python:3-alpine, but it doesn't have 'gcc' and the github3 library needs that. 
FROM python:3-alpine WORKDIR /action From 54c83875e4607667c7b90877617f195e87c2916f Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sun, 9 Feb 2020 19:10:46 +0100 Subject: [PATCH 101/105] Don't copy README & LICENSE to Docker container --- Dockerfile | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 4512444..16d4cc7 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,10 +2,8 @@ FROM python:3-alpine WORKDIR /action -# Copy action metadata -COPY LICENSE README.md requirements.txt ./ # Copy action code -COPY entrypoint.py ./ +COPY requirements.txt entrypoint.py ./ COPY syndicate/ ./syndicate/ # Install action requirements From 598380b3ecc84072f3dbb1eda6d9924a51d671c7 Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sun, 9 Feb 2020 19:17:29 +0100 Subject: [PATCH 102/105] Remove unnecessary parens from for loops --- entrypoint.py | 6 +++--- syndicate/__init__.py | 2 +- syndicate/utils.py | 6 +++--- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/entrypoint.py b/entrypoint.py index d1d9ba4..40be513 100755 --- a/entrypoint.py +++ b/entrypoint.py @@ -52,7 +52,7 @@ # Just focus on the added ones. indexed_paths_by_silo = { silo: results['added'] - for (silo, results) in syndicated_posts.items() + for silo, results in syndicated_posts.items() if results and 'added' in results } @@ -69,8 +69,8 @@ # ... 
# } silo_ids_by_path = {} - for (silo, indexed_paths) in indexed_paths_by_silo.items(): - for (path, ( sid, _ )) in indexed_paths.items(): + for silo, indexed_paths in indexed_paths_by_silo.items(): + for path, ( sid, _ ) in indexed_paths.items(): silo_ids_by_path.setdefault(path, {}) silo_ids_by_path[path][silo] = sid diff --git a/syndicate/__init__.py b/syndicate/__init__.py index 560b8b2..16f5c4b 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -57,7 +57,7 @@ def elsewhere(posts, silos): action_log("I'll do what I can.") results = { silo:_syndicate(spec, api_keys[silo], posts) - for (silo, spec) in specs.items() + for silo, spec in specs.items() if silo in api_keys } if results: diff --git a/syndicate/utils.py b/syndicate/utils.py index e2d6f1e..18cedc9 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -163,7 +163,7 @@ def mark_syndicated_posts(silo_ids_by_path, fronted_posts_by_path): updated_fronted_posts_by_path = {} silos_included = set() - for (path, silo_ids_by_silo) in silo_ids_by_path.items(): + for path, silo_ids_by_silo in silo_ids_by_path.items(): fronted_post = fronted_posts_by_path[path] # Format: @@ -173,7 +173,7 @@ def mark_syndicated_posts(silo_ids_by_path, fronted_posts_by_path): # ... # } new_silo_ids = {} - for (silo, sid) in silo_ids_by_silo.items(): + for silo, sid in silo_ids_by_silo.items(): # Ignore already posts marked with this silo if not silo_id_for(fronted_post, silo): new_silo_ids[silo_key_for(silo)] = sid @@ -224,7 +224,7 @@ def commit_updated_posts(fronted_posts_by_path, silos): type='blob', content=frontmatter.dumps(fronted_post) ) - for (path, fronted_post) in fronted_posts_by_path.items() + for path, fronted_post in fronted_posts_by_path.items() ], base_tree=repo().get_git_tree(parent) ) From 9ade4e12ad43c16807f6ab4517de4d7c90ba9c6d Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sun, 9 Feb 2020 19:19:56 +0100 Subject: [PATCH 103/105] Use `isinstance` instead of `type(...) 
== ...` --- syndicate/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/syndicate/utils.py b/syndicate/utils.py index 18cedc9..e78f666 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -130,7 +130,7 @@ def fronted(post): If `post` is actually already a `frontmatter.Post`, this is a no-op. """ assert post, "missing post" - if type(post) == frontmatter.Post: + if isinstance(post, frontmatter.Post): return post raw_contents = post.decoded_content.decode('utf-8') return frontmatter.loads(raw_contents) From 48f84920550bd3e6e5d1fd3e25efea0b939d00ac Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sun, 9 Feb 2020 19:24:01 +0100 Subject: [PATCH 104/105] Make '_API_KEY' a named function instead of lambda --- syndicate/__init__.py | 7 +++++-- tests/test_syndicate.py | 6 +++--- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/syndicate/__init__.py b/syndicate/__init__.py index 16f5c4b..c126c14 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -67,7 +67,6 @@ def elsewhere(posts, silos): return None ### privates ### -_API_KEY = lambda s: f"{s.upper()}_API_KEY" @functools.lru_cache(maxsize=10) def _locate(silo): @@ -84,4 +83,8 @@ def _syndicate(silo_spec, api_key, posts): def _get_api_key(silo): """Returns the API key for the given silo, as defined in the environment.""" assert silo, 'missing silo' - return os.getenv(_API_KEY(silo)) + return os.getenv(_api_key_for(silo)) + +def _api_key_for(silo): + """Returns the name of the environment variable expected to contain an API key for the given silo.""" + return f"{silo.upper()}_API_KEY" diff --git a/tests/test_syndicate.py b/tests/test_syndicate.py index 2deffd5..c62bf78 100644 --- a/tests/test_syndicate.py +++ b/tests/test_syndicate.py @@ -17,7 +17,7 @@ def test_elsewhere_returns_none_when_given_no_silos(): def test_elsewhere_returns_none_when_no_api_keys_exist_for_given_silos(monkeypatch): fake_silo = 'Fake_Silo' # Ensure we cannot use the fake silo adapter. 
- monkeypatch.delenv(syndicate._API_KEY(fake_silo), raising=False) + monkeypatch.delenv(syndicate._api_key_for(fake_silo), raising=False) assert not syndicate.elsewhere(['a post'], [fake_silo]) def test_elsewhere_returns_none_when_no_adapter_exists_for_given_silos(monkeypatch): @@ -25,7 +25,7 @@ def test_elsewhere_returns_none_when_no_adapter_exists_for_given_silos(monkeypat # Ensure we cannot find the fake silo adapter. monkeypatch.setattr(importlib.util, 'find_spec', lambda s: None) # Ensure we can use the fake silo adapter. - monkeypatch.setenv(syndicate._API_KEY(fake_silo), 'fake API key') + monkeypatch.setenv(syndicate._api_key_for(fake_silo), 'fake API key') assert not syndicate.elsewhere(['a post'], [fake_silo]) def test_elsewhere_returns_syndication_results_for_recognized_silos_when_given_api_keys(monkeypatch): @@ -41,5 +41,5 @@ def syndicate(posts, api_key): # Ensure we can load the fake silo adapter. monkeypatch.setattr(importlib, 'import_module', lambda s: MockSilo) # Ensure we can use the fake silo adapter. 
- monkeypatch.setenv(syndicate._API_KEY(fake_silo), 'fake API key') + monkeypatch.setenv(syndicate._api_key_for(fake_silo), 'fake API key') assert syndicate.elsewhere(['a post'], [fake_silo]) From a7606e890570c849e77faa35c14f33c382c8d04b Mon Sep 17 00:00:00 2001 From: Daniel Brady Date: Sun, 9 Feb 2020 19:49:39 +0100 Subject: [PATCH 105/105] Replace assertions with "unexpected value" errors --- syndicate/__init__.py | 12 ++++++++---- syndicate/silos/dev.py | 15 ++++++++++----- syndicate/utils.py | 39 ++++++++++++++++++++++++++------------- tests/test_dev.py | 8 ++++---- 4 files changed, 48 insertions(+), 26 deletions(-) diff --git a/syndicate/__init__.py b/syndicate/__init__.py index c126c14..bb8df01 100755 --- a/syndicate/__init__.py +++ b/syndicate/__init__.py @@ -71,18 +71,22 @@ def elsewhere(posts, silos): @functools.lru_cache(maxsize=10) def _locate(silo): """Locates the given silo adapter and returns its Python module name if found.""" - assert silo, 'missing silo' + if not silo: + raise ValueError('missing silo') return getattr(importlib.util.find_spec(f'syndicate.silos.{silo.lower()}'), 'name', None) def _syndicate(silo_spec, api_key, posts): """Loads and invokes the entrypoint of the given silo adaptor, returning the results.""" - assert silo_spec, 'missing silo spec' - assert api_key, 'missing API key' + if not silo_spec: + raise ValueError('missing silo spec') + if not api_key: + raise ValueError('missing API key') return importlib.import_module(silo_spec).syndicate(posts, api_key) def _get_api_key(silo): """Returns the API key for the given silo, as defined in the environment.""" - assert silo, 'missing silo' + if not silo: + raise ValueError('missing silo') return os.getenv(_api_key_for(silo)) def _api_key_for(silo): diff --git a/syndicate/silos/dev.py b/syndicate/silos/dev.py index 98f0692..6f37bd2 100644 --- a/syndicate/silos/dev.py +++ b/syndicate/silos/dev.py @@ -41,9 +41,12 @@ def _create(post, api_key=None): @see 
https://docs.dev.to/api/#operation/createArticle """ - assert api_key, "missing API key" - assert post, "missing post" - assert fronted(post).get('title'), "article is missing a title" + if not api_key: + raise ValueError("missing API key") + if not post: + raise ValueError("missing post") + if not fronted(post).get('title'): + raise ValueError("article is missing a title") payload = { 'article': { @@ -73,8 +76,10 @@ def _update(post, api_key=None): @see https://docs.dev.to/api/#operation/updateArticle """ - assert api_key, "missing API key" - assert post, "missing post" + if not api_key: + raise ValueError("missing API key") + if not post: + raise ValueError("missing post") endpoint = f'https://dev.to/api/articles/{silo_id_for(post, SILO_NAME)}' headers = {'api-key': api_key} diff --git a/syndicate/utils.py b/syndicate/utils.py index e78f666..06b4d5c 100644 --- a/syndicate/utils.py +++ b/syndicate/utils.py @@ -69,8 +69,10 @@ def repo(): repository this Github action is running in. @see https://pygithub.readthedocs.io/en/latest/github_objects/Repository.html#github.Repository.Repository """ - assert os.getenv("GITHUB_TOKEN"), "missing GITHUB_TOKEN" - assert os.getenv("GITHUB_REPOSITORY"), "missing GITHUB_REPOSITORY" + if not os.getenv("GITHUB_TOKEN"): + raise ValueError("missing GITHUB_TOKEN") + if not os.getenv("GITHUB_REPOSITORY"): + raise ValueError("missing GITHUB_REPOSITORY") gh = Github(os.getenv("GITHUB_TOKEN")) return gh.get_repo(os.getenv("GITHUB_REPOSITORY")) @@ -80,7 +82,8 @@ def parent_sha(): Returns the git SHA to use as parent for any commits generated by this Github workflow step. """ - assert os.getenv("GITHUB_SHA"), "missing GITHUB_SHA" + if not os.getenv("GITHUB_SHA"): + raise ValueError("missing GITHUB_SHA") return os.getenv('SYNDICATE_SHA', os.getenv("GITHUB_SHA")) def get_trigger_payload(): @@ -89,7 +92,8 @@ def get_trigger_payload(): files in the commit that triggered this Github workflow. 
@see https://pygithub.readthedocs.io/en/latest/github_objects/File.html#github.File.File """ - assert os.getenv("GITHUB_SHA"), "missing GITHUB_SHA" + if not os.getenv("GITHUB_SHA"): + raise ValueError("missing GITHUB_SHA") # NOTE # Explicitly using GITHUB_SHA to ensure we always have access to the changed # files even if other steps generate commits. @@ -113,7 +117,8 @@ def get_posts(post_dir=os.getenv('SYNDICATE_POST_DIR', 'posts')): added and modified in the commit that triggered this Github workflow. """ files = get_trigger_payload() - assert files, "target commit was empty" + if not files: + raise ValueError("target commit was empty") posts = [file for file in files if file.filename.startswith(post_dir)] return [ @@ -129,7 +134,8 @@ def fronted(post): If `post` is actually already a `frontmatter.Post`, this is a no-op. """ - assert post, "missing post" + if not post: + raise ValueError("missing post") if isinstance(post, frontmatter.Post): return post raw_contents = post.decoded_content.decode('utf-8') @@ -144,8 +150,10 @@ def silo_id_for(post, silo): Retrieves the ID appropriate for `silo` from the frontmatter of the given `post`; returns None if no relevant ID exists. """ - assert post, "missing post" - assert silo, "missing silo" + if not post: + raise ValueError("missing post") + if not silo: + raise ValueError("missing silo") return fronted(post).get(silo_key_for(silo)) def mark_syndicated_posts(silo_ids_by_path, fronted_posts_by_path): @@ -158,8 +166,10 @@ def mark_syndicated_posts(silo_ids_by_path, fronted_posts_by_path): Returns a dictionary which is the response of the commit request. 
""" - assert silo_ids_by_path, "missing silo IDs" - assert fronted_posts_by_path, "missing fronted posts" + if not silo_ids_by_path: + raise ValueError("missing silo IDs") + if not fronted_posts_by_path: + raise ValueError("missing fronted posts") updated_fronted_posts_by_path = {} silos_included = set() @@ -210,9 +220,12 @@ def commit_updated_posts(fronted_posts_by_path, silos): if not fronted_posts_by_path: action_log("All good: already marked.") return None - assert os.getenv("GITHUB_TOKEN"), "missing GITHUB_TOKEN" - assert os.getenv("GITHUB_REPOSITORY"), "missing GITHUB_REPOSITORY" - assert os.getenv("GITHUB_REF"), "missing GITHUB_REF" + if not os.getenv("GITHUB_TOKEN"): + raise ValueError("missing GITHUB_TOKEN") + if not os.getenv("GITHUB_REPOSITORY"): + raise ValueError("missing GITHUB_REPOSITORY") + if not os.getenv("GITHUB_REF"): + raise ValueError("missing GITHUB_REF") parent = parent_sha() # Create a new tree with our updated blobs. diff --git a/tests/test_dev.py b/tests/test_dev.py index 21cec36..e577761 100644 --- a/tests/test_dev.py +++ b/tests/test_dev.py @@ -6,11 +6,11 @@ import requests_mock def test_create_error_when_api_key_missing(): - with pytest.raises(AssertionError): + with pytest.raises(ValueError): dev._create(MockPost()) def test_create_error_when_post_missing(): - with pytest.raises(AssertionError): + with pytest.raises(ValueError): dev._create(None) def test_create_returns_nothing_when_request_fails(requests_mock, monkeypatch): @@ -30,11 +30,11 @@ def test_create_returns_something_on_success(requests_mock, monkeypatch): assert dev._create(MockPost(), api_key='fake_api_key') def test_update_error_when_api_key_missing(): - with pytest.raises(AssertionError): + with pytest.raises(ValueError): dev._update(MockPost()) def test_update_error_when_post_missing(): - with pytest.raises(AssertionError): + with pytest.raises(ValueError): dev._update(None) def test_update_returns_nothing_when_request_fails(requests_mock, monkeypatch):