Initial commit
commit 9fc944e1dd
.gitignore
@@ -0,0 +1,3 @@
venv
*.swp
Backblogger.py
@@ -0,0 +1,76 @@
# Backblogger.py : Scan directories for images and scaffold them into a blog post.
import os
import json
from datetime import datetime, date
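# Layout assumed by the __main__ block at the bottom (inferred from the code, not
# documented in this commit): the script lives in its own directory, and every
# sibling directory holds the images for one future post, e.g.
#
#   blog/                    (hypothetical names)
#       backblogger/         <- this script
#       some-trip-photos/    <- images to scaffold into a post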
def markdown_date(ts):
    # Render a Unix timestamp as a YYYY-M-D date string for the post metadata.
    d = datetime.fromtimestamp(ts)
    return f"{d.year}-{d.month}-{d.day}"
IMG_WIDTH = 760

def resize_rename(imgpath, article_number, image_number):
    # Build the per-article image filename; resizing to IMG_WIDTH is not implemented yet.
    new_fn = f"article{str(article_number).zfill(2)}_image{str(image_number).zfill(2)}.{imgpath.split('.')[-1]}"
    return new_fn
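# Illustrative call: resize_rename("IMG_1234.jpg", "3", "0") -> "article03_image00.jpg"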
class BlogScaffold:

    def __init__(self, path):
        self.path = os.path.abspath(path)
        self.data = { "images": [],
                      "blogfile": None
                    }
        # Check the path for backblog metadata
        self.scanned = os.path.exists(os.path.join(self.path, "backblog.json"))
        if not self.scanned:
            self.scan()
            return
        with open(os.path.join(self.path, "backblog.json"), "r") as f:
            self.data = json.loads(f.read())
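    # Shape of backblog.json as written by save() below (values illustrative, not
    # from a real run):
    #   {"images": [{"path": "photo.jpg", "date": 1700000000.0}], "blogfile": null}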
    def scan(self):
        # Record every file directly inside the post directory, using its ctime as the image date.
        _, _, files = next(os.walk(self.path))
        for f in files:
            self.data['images'].append({
                "path": f,
                "date": os.stat(os.path.join(self.path, f)).st_ctime
            })
        self.scanned = True
        self.save()
    def save(self):
        with open(os.path.join(self.path, "backblog.json"), "w") as f:
            f.write(json.dumps(self.data))
    def image_times(self):
        for i in self.data['images']:
            yield datetime.fromtimestamp(i['date'])
    def markdown_template(self, article_number):
        replace = {
            "{{TITLE}}": "Backblog basic template about " + self.path,
            "{{SLUG}}": os.path.basename(self.path),
            "{{CATEGORY}}": "category",
            "{{EARLIESTDATE}}": markdown_date(min([i['date'] for i in self.data['images']])),
            "{{TODAY}}": str(date.today()),
            "{{ARTICLENUM}}": str(article_number).zfill(2)
        }
        txt = None
        with open("template.md", "r") as f:
            txt = f.read()
        img_template = txt.split("%%%")[1]
        img_txt = ""
        for i, image in enumerate(self.data['images']):
            img_fn = resize_rename(image['path'], article_number, i)
            this_txt = img_template
            # str.replace returns a new string, so keep the result and collect it.
            this_txt = this_txt.replace("{{IMG_FN}}", img_fn)
            img_txt += this_txt
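    # Example of the substitutions above for a hypothetical post directory named
    # "some-trip-photos" with article_number "3": {{SLUG}} -> "some-trip-photos",
    # {{ARTICLENUM}} -> "03", {{TODAY}} -> today's ISO date. Note that the method
    # currently builds `replace` and the per-image text but does not yet apply or
    # save them.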
    def __repr__(self):
        if not self.scanned:
            return f"<BlogScaffold path={self.path}, scanned=False>"
        return f"<BlogScaffold path={self.path}, blogfile={self.data['blogfile']}, {len(self.data['images'])} image files>"
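# Usage (inferred from the relative paths below, not stated in the commit): run
# `python Backblogger.py` from inside the tool's own directory, so that '..' is
# the directory containing the individual post folders.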
if __name__ == '__main__':
    # Assumes '..' contains only the post directories plus this tool's directory.
    subdirs = os.listdir('..')
    # don't scan program's directory
    subdirs.remove(os.path.basename(os.path.abspath('.')))
    scaffolds = [BlogScaffold(os.path.join('..', sd)) for sd in subdirs]
template.md
@@ -0,0 +1,28 @@
title: {{TITLE}}
slug: {{SLUG}}
category: {{CATEGORY}}
startdate: {{EARLIESTDATE}}
date: {{TODAY}}
modified: {{TODAY}}

![alt text](images/article{{ARTICLENUM}}_image01_header.{{EXT}})

# TITLE

asdf. etaoin shrdlu. LOREM IPSUM

%%%
![alt text](images/article{{ARTICLENUM}}_image01_header.{{EXT}})
%%%

## challenges

asdf. etaoin shrdlu. LOREM IPSUM

## favorite part

asdf. etaoin shrdlu. LOREM IPSUM

## what I learned

asdf. etaoin shrdlu. LOREM IPSUM