continued tweaks

parent 3957c44faa
commit 0f340433ae
@@ -1,6 +1,11 @@
 #!/usr/bin/env python
 # License: AGPL
 #
+#
+# todo:
+# Capture exceptions... add HTTP status errors (502) to meta!!
+# so that an eventual index can show the problematic pages!
+# Also: provide links to text only / html versions when diff HTML fails
 
 from __future__ import print_function
 from etherdump import DATAPATH
@@ -12,6 +17,7 @@ from datetime import datetime
 from xml.etree import cElementTree as ET
 from urllib import urlencode
 from urllib2 import urlopen, HTTPError, URLError
+from time import sleep
 
 # external dependencies (use pip to install these)
 import html5lib, jinja2
@@ -102,6 +108,11 @@ def get_template_env (tpath=None):
     env = jinja2.Environment(loader=loader)
     return env
 
+def get_group_info(gid, info):
+    if 'groups' in info:
+        if gid in info['groups']:
+            return info['groups'][gid]
+
 def main(args):
     p = ArgumentParser("""
       _   _                  _
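
The new get_group_info helper is a guarded lookup into the optional "groups" mapping that this commit also adds to the sample settings JSON (last hunk below); it returns the group's record, or falls through to None when there is no mapping. A minimal sketch, using the sample values from that hunk:

    # Sketch only: this "groups" dict mirrors the block added to the
    # settings JSON at the end of this commit.
    info = {
        "groups": {
            "71FpVh4MZBvl8VZ6": {"name": "Transmediale", "id": 43},
            "HyYfoX3Q6S5utxs5": {"name": "test", "id": 42},
        }
    }

    def get_group_info(gid, info):
        if 'groups' in info:
            if gid in info['groups']:
                return info['groups'][gid]

    print(get_group_info("HyYfoX3Q6S5utxs5", info))  # {'name': 'test', 'id': 42}
    print(get_group_info("unknown", info))           # None (no match falls through)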
@@ -117,7 +128,6 @@ def main(args):
     p.add_argument("--verbose", default=False, action="store_true", help="flag for verbose output")
     p.add_argument("--limit", type=int, default=None)
     p.add_argument("--allpads", default=False, action="store_true", help="flag to process all pads")
-    p.add_argument("--spider", default=False, action="store_true", help="flag to spider pads")
     p.add_argument("--templatepath", default=os.path.join(DATAPATH, "templates"), help="directory with templates (override default files)")
     p.add_argument("--colors-template", default="pad_colors.html", help="pad with authorship colors template name: pad_colors.html")
     p.add_argument("--padlink", default=[], action="append", help="give a pad link pattern, example: 'http\:\/\/10\.1\.10\.1/p/(.*)'")
@@ -126,8 +136,11 @@ def main(args):
     p.add_argument("--showurls", default=False, action="store_true", help="flag to display API URLs that are used (to stderr)")
     p.add_argument("--hidepaths", default=False, action="store_true", help="flag to not display paths")
     p.add_argument("--pretend", default=False, action="store_true", help="flag to not actually save")
+    p.add_argument("--linkify", default=False, action="store_true", help="flag to process [[link]] forms (and follow when --spider is used)")
+    p.add_argument("--spider", default=False, action="store_true", help="flag to spider pads (requires --linkify)")
     p.add_argument("--add-images", default=False, action="store_true", help="flag to add image tags")
-    p.add_argument("--authors-css", default="authors.css", help="filename to save collected authorship css (nb: etherdump will overwrite this file!)")
+    p.add_argument("--force", default=False, action="store_true", help="force dump (even if not updated since last dump)")
+    p.add_argument("--authors-css", default=None, help="filename to save collected authorship css (nb: any existing file will be mercilessly overwritten), default: don't accumulate css")
 
     # TODO css from pad --- ie specify a padid for a stylesheet!!!!!!
     # p.add_argument("--css", default="styles.css", help="padid of stylesheet")
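
Per the new help texts, --spider is only meaningful together with --linkify (spidering follows the [[link]] targets that linkify collects). The diff itself does not enforce the dependency; a hypothetical guard, not part of this commit, could reject the combination after parsing:

    from argparse import ArgumentParser

    # Hypothetical check: argparse has no built-in "requires" relation
    # between flags, so the dependency is validated after parse_args.
    p = ArgumentParser()
    p.add_argument("--linkify", default=False, action="store_true")
    p.add_argument("--spider", default=False, action="store_true")
    args = p.parse_args(["--spider"])
    if args.spider and not args.linkify:
        p.error("--spider requires --linkify")  # exits with a usage message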
@@ -175,14 +188,37 @@ def main(args):
         done.add(padid)
 
         data['padID'] = padid.encode("utf-8")
 
         if args.verbose:
-            print ("PADID \"{0}\"".format(padid).encode("utf-8"), file=sys.stderr)
+            print (u"PADID \"{0}\"".format(padid).encode("utf-8"), file=sys.stderr)
 
+        # g.yIRLMysh0PMsCMHc$
+        grouppat = re.compile(ur"^g\.(\w+)\$(.+)$")
+        m = grouppat.search(padid)
+        if m:
+            group = m.group(1)
+            ginfo = get_group_info(group, info)
+            if not ginfo:
+                print ("No info for group '{0}', skipping".format(group), file=sys.stderr)
+                continue
+            padid = m.group(2)
+        else:
+            group = None
+            ginfo = None
+
         if not args.pretend:
             try:
+                if ginfo:
+                    os.makedirs(os.path.join(args.path, ginfo['name']))
+                else:
                     os.makedirs(args.path)
             except OSError:
                 pass
 
+        retry = True
+        tries = 1
+        while retry:
+            retry = False
             try:
 
                 #                  _
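
Pads that belong to an Etherpad group arrive with ids of the form g.<groupkey>$<padname>, as in the g.yIRLMysh0PMsCMHc$ comment; the new regex peels off the group key so the rest of the loop sees the bare pad name. A quick illustration (using a plain r"" literal, since the ur"" form in the diff is Python-2-only; the pad name "notes" here is made up):

    import re

    grouppat = re.compile(r"^g\.(\w+)\$(.+)$")

    # example id built from the group key in the comment above
    m = grouppat.search("g.yIRLMysh0PMsCMHc$notes")
    if m:
        print(m.group(1))  # "yIRLMysh0PMsCMHc" -> looked up via get_group_info
        print(m.group(2))  # "notes" -> becomes the new padid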
@@ -192,14 +228,18 @@ def main(args):
                 # |_| |_| |_|\___|\__\__,_|
 
                 meta_url = urlify(padid, ext=".json")
-                meta_out = "{0}/{1}".format(args.path, meta_url.encode("utf-8"))
                 raw_url = urlify(padid, ext=".txt")
-                raw_out = "{0}/{1}".format(args.path, raw_url.encode("utf-8"))
                 colors_url = urlify(padid, ext=".html")
+
+                if ginfo:
+                    meta_out = "{0}/{1}/{2}".format(args.path, ginfo['name'], meta_url.encode("utf-8"))
+                    raw_out = "{0}/{1}/{2}".format(args.path, ginfo['name'], raw_url.encode("utf-8"))
+                    colors_out = "{0}/{1}/{2}".format(args.path, ginfo['name'], colors_url.encode("utf-8"))
+                else:
+                    meta_out = "{0}/{1}".format(args.path, meta_url.encode("utf-8"))
+                    raw_out = "{0}/{1}".format(args.path, raw_url.encode("utf-8"))
                     colors_out = "{0}/{1}".format(args.path, colors_url.encode("utf-8"))
 
-                if not args.hidepaths:
-                    print (meta_out, file=sys.stderr)
                 if not args.pretend:
                     meta = {}
                     meta['padid'] = padid
@@ -208,6 +248,19 @@ def main(args):
                         print (revisions_url, file=sys.stderr)
                     meta['total_revisions'] = json.load(urlopen(revisions_url))['data']['revisions']
+
+                    # CHECK REVISIONS (against existing meta)
+                    if meta['total_revisions'] == 0:
+                        if args.verbose:
+                            print ("  pad has no revisions, skipping", file=sys.stderr)
+                        continue
+                    if os.path.exists(meta_out):
+                        with open(meta_out) as f:
+                            old_meta = json.load(f)
+                        if not args.force and old_meta['total_revisions'] == meta['total_revisions']:
+                            if args.verbose:
+                                print ("  skipping (up to date)", file=sys.stderr)
+                            continue
 
                     lastedited_url = apiurl+'getLastEdited?'+urlencode(data)
                     if args.showurls:
                         print (lastedited_url, file=sys.stderr)
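
This check makes the dump incremental: a pad is skipped when the meta file on disk already records the same total_revisions, unless --force is given. Distilled into a standalone predicate (a sketch, assuming the same meta file layout; the name is_up_to_date is not from the source):

    import json, os

    def is_up_to_date(meta_out, total_revisions, force=False):
        # True means "skip this pad": nothing changed since the last dump
        if force or not os.path.exists(meta_out):
            return False
        with open(meta_out) as f:
            old_meta = json.load(f)
        return old_meta['total_revisions'] == total_revisions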
@@ -223,21 +276,24 @@ def main(args):
                     meta['colors'] = colors_url
                     meta['raw'] = raw_url
                     meta['meta'] = meta_url
-                    with open(meta_out, "w") as f:
-                        json.dump(meta, f)
+                    # defer output to LAST STEP (as confirmation)
 
                 #  _ __ __ ___      __
                 # | '__/ _` \ \ /\ / /
                 # | | | (_| |\ V  V /
                 # |_|  \__,_| \_/\_/
 
-                if not args.hidepaths:
-                    print (raw_out, file=sys.stderr)
                 text_url = apiurl+"getText?"+urlencode(data)
                 if args.showurls:
                     print (text_url, file=sys.stderr)
                 if not args.pretend:
                     rawText = json.load(urlopen(text_url))['data']['text']
+                    if rawText.strip() == "":
+                        if args.verbose:
+                            print ("  empty text, skipping", file=sys.stderr)
+                        continue
+                    if not args.hidepaths:
+                        print (raw_out, file=sys.stderr)
                     with open(raw_out, "w") as f:
                         f.write(rawText.encode("utf-8"))
 
@@ -269,6 +325,7 @@ def main(args):
                     # Stage 1: Process as text
                     # Process [[wikilink]] style links
                     # and (optionally) add linked page names to spider todo list
+                    if args.linkify:
                         html, links = linkify(html)
                         if args.spider:
                             for l in links:
@@ -338,8 +395,8 @@ def main(args):
                             if len(i):
                                 selector, rule = i.split(' ',1)
                                 authors_css_rules[selector] = rule
-                        style = '' # strip the individual style tag from each page (only exports to authors-css file)
-                        # nb: it's up to the template to refer to the authors-css file
+                        # replace individual style with a ref to the authors-css
+                        style = '<link rel="stylesheet" type="text/css" href="{0}">'.format(args.authors_css)
                     else:
                         style = ET.tostring(style, method="html")
                 else:
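
So with --authors-css set, each pad page now links to the shared stylesheet instead of carrying its own style block, and the per-author rules accumulate in authors_css_rules. The flush happens in the "# Write the unified CSS with authors" block whose body this diff doesn't show; a hedged sketch of what such a write-out could look like:

    # Sketch only: the actual write-out is not shown in this diff, just
    # its guard ("if args.authors_css:"). Assumes authors_css_rules maps
    # a CSS selector to its rule text, as parsed by split(' ', 1) above.
    def write_authors_css(path, authors_css_rules):
        with open(path, "w") as f:
            for selector in sorted(authors_css_rules):
                f.write("{0} {1}\n".format(selector, authors_css_rules[selector]))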
@@ -362,6 +419,11 @@ def main(args):
                             lastedited = meta['lastedited']
                         ).encode("utf-8"))
 
+                # OUTPUT METADATA (finally)
+                if not args.hidepaths:
+                    print (meta_out, file=sys.stderr)
+                with open(meta_out, "w") as f:
+                    json.dump(meta, f)
                 #  _
                 # | | ___   ___  _ __
                 # | |/ _ \ / _ \| '_ \
@@ -372,8 +434,24 @@ def main(args):
                 count += 1
                 if args.limit and count >= args.limit:
                     break
-            except TypeError:
-                print ("ERROR, skipping!", file=sys.stderr)
+            # except HTTPError as e:
+            #     retry = True
+
+            # except TypeError as e:
+            #     print ("TypeError, skipping!", file=sys.stderr)
+
+            except Exception as e:
+                print ("[{0}] Exception: {1}".format(tries, e), file=sys.stderr)
+                sleep(3)
+                retry = True
+
+            if retry:
+                tries += 1
+                if tries > 5:
+                    print ("  GIVING UP", file=sys.stderr)
+                    retry = False
 
     # Write the unified CSS with authors
     if args.authors_css:
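
The replaced bare except TypeError becomes a real retry policy: any exception logs a numbered attempt, sleeps 3 seconds, and re-enters the while retry loop set up earlier in the commit, giving up once the counter passes 5. The skeleton, separated from the pad-specific work:

    from __future__ import print_function
    import sys
    from time import sleep

    retry = True
    tries = 1
    while retry:
        retry = False
        try:
            pass  # ... fetch and write one pad (meta, text, colors html) ...
        except Exception as e:
            print("[{0}] Exception: {1}".format(tries, e), file=sys.stderr)
            sleep(3)
            retry = True

        if retry:
            tries += 1
            if tries > 5:
                print("  GIVING UP", file=sys.stderr)
                retry = False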
@@ -5,7 +5,6 @@
     <meta charset="utf-8">
     <meta revision="{{revision}}">
     <link rel="stylesheet" type="text/css" href="pad.css">
-    <link rel="stylesheet" type="text/css" href="authors.css">
     {{ style }}
 </head>
 <body>
@@ -4,5 +4,9 @@
     "hostname": "localhost",
     "apiversion": "1.2.9",
     "apiurl": "/api/",
-    "apikey": "8f55f9ede1b3f5d88b3c54eb638225a7bb71c64867786b608abacfdb7d418be1"
+    "apikey": "8f55f9ede1b3f5d88b3c54eb638225a7bb71c64867786b608abacfdb7d418be1",
+    "groups": {
+        "71FpVh4MZBvl8VZ6": {"name": "Transmediale", "id": 43},
+        "HyYfoX3Q6S5utxs5": {"name": "test", "id": 42 }
+    }
 }