Merge pull request #1 from dabrady/develop
syndicate: a simple implementation of P.O.S.S.E.
dabrady committed Feb 13, 2020
2 parents cff20e7 + a7606e8 commit 8a1e7b3
Showing 16 changed files with 1,079 additions and 1 deletion.
72 changes: 72 additions & 0 deletions .github/workflows/example.yml
@@ -0,0 +1,72 @@
name: Example syndicate flow
on:
  push:
    # Watch these branches...
    branches:
      - master
    # ...for changes to these files.
    paths:
      - 'pages/**/*.mdx?'

jobs:
  syndicate:
    runs-on: ubuntu-latest
    name: Syndicate posts
    env:
      # This is provided to all actions by GitHub, and is needed to access the posts
      # in your repository.
      # @see https://help.github.com/en/actions/automating-your-workflow-with-github-actions/authenticating-with-the-github_token
      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      # Tell me the path (relative to the project root) where your content lives
      # so I can find it in the commit that triggered this flow.
      # Defaults to 'posts' if not specified.
      SYNDICATE_POST_DIR: 'pages/posts'

    steps:
      # This step will syndicate your content to DEV.to, if supported, but will
      # NOT commit the platform-specific content IDs back to your repo. This is
      # useful for bundling the results of multiple actions into a single commit.
      #
      # However, note that future syndications to DEV.to will be unaware of the
      # drafts already created by this step and will result in duplicate drafts.
      #
      # To avoid this, include a later step which does not specify any silos but
      # sets the `mark_as_syndicated` flag to true. This will commit the syndicate
      # IDs that have not already been added during this job.
      - name: Push to DEV.to
        uses: dabrady/syndicate@develop
        with:
          silos: DEV
        env:
          DEV_API_KEY: ${{ secrets.DEV_API_KEY }}

      # This step will syndicate your content to Medium and Planet Mars, if
      # supported, and will commit any new platform-specific content IDs to the
      # appropriate files.
      #
      # Note that this will NOT commit IDs generated by previous actions, just
      # the ones from this action.
      - name: Push to Medium and Planet Mars and commit new IDs
        uses: dabrady/syndicate@develop
        with:
          # Use a multiline YAML string to specify multiple silos.
          silos: |
            Medium
            Planet_Mars
          mark_as_syndicated: true
        env:
          MEDIUM_API_KEY: ${{ secrets.MEDIUM_API_KEY }}
          PLANET_MARS_API_KEY: ${{ secrets.MARS_API_KEY }}

      # This step will not syndicate any content to any silos; instead it will
      # commit any new platform-specific content IDs generated by previous steps
      # in this job to the appropriate files, if they haven't already been added.
      #
      # It's a nice way of bundling the results of multiple steps into a single
      # commit, or just cleaning up at the end of a job to make sure you didn't
      # miss anything.
      - name: Commit IDs of newly syndicated posts if necessary
        uses: dabrady/syndicate@develop
        with:
          mark_as_syndicated: true
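
For the simplest possible setup, syndication and ID-committing do not need to be split across steps: a single step may set both `silos` and `mark_as_syndicated`, as the Medium/Planet Mars step above already does. A minimal sketch, assuming the same inputs and secrets shown in the example workflow:

name: Minimal syndicate flow
on:
  push:
    branches:
      - master

jobs:
  syndicate:
    runs-on: ubuntu-latest
    env:
      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    steps:
      # Syndicate to DEV.to and commit any new content IDs in the same step.
      - name: Push to DEV.to and commit new IDs
        uses: dabrady/syndicate@develop
        with:
          silos: DEV
          mark_as_syndicated: true
        env:
          DEV_API_KEY: ${{ secrets.DEV_API_KEY }}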
129 changes: 129 additions & 0 deletions .gitignore
@@ -0,0 +1,129 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/
15 changes: 15 additions & 0 deletions Dockerfile
@@ -0,0 +1,15 @@
FROM python:3-alpine

WORKDIR /action

# Copy action code
COPY requirements.txt entrypoint.py ./
COPY syndicate/ ./syndicate/

# Install action requirements
RUN pip install --no-cache-dir -r ./requirements.txt

# Hard-code the WORKDIR path into ENTRYPOINT: environment variables are not
# expanded in the "exec" form of ENTRYPOINT, but the "exec" form is the
# recommended one.
ENTRYPOINT [ "/action/entrypoint.py" ]
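
GitHub builds this image automatically whenever the action is referenced from a workflow, but the same image can be built and run locally to sanity-check the entrypoint. A rough sketch only; the image tag and environment values are illustrative, and the entrypoint may expect additional context that GitHub normally injects:

docker build -t syndicate-action .
docker run --rm \
  -e GITHUB_TOKEN=<a GitHub token with access to your repo> \
  -e SYNDICATE_POST_DIR=pages/posts \
  -e DEV_API_KEY=<your DEV.to API key> \
  syndicate-action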