|
@@ -20,8 +20,8 @@ from etherpump.commands.html5tidy import html5tidy

"""
pull(meta):
    Update metadata files for pads that have changed.
    Check for changed pads by looking at their revision count and comparing
    it to the existing local copy.

    todo: use/prefer public interfaces? (export functions)
"""
|
@@ -31,487 +31,498 @@ skipped, saved = 0, 0

async def try_deleting(files):
    for f in files:
        try:
            path = trio.Path(f)
            if os.path.exists(path):
                # Note: Path.rmdir() removes (empty) directories; the paths this
                # is called with are regular files (e.g. PADID.meta.json), for
                # which Path.unlink() would presumably be the matching call.
                await path.rmdir()
        except Exception as exception:
            print("PANIC: {}".format(exception))


def build_argument_parser(args):
    parser = ArgumentParser(
        "Check for pads that have changed since last sync (according to .meta.json)"
    )
    parser.add_argument("padid", nargs="*", default=[])
    parser.add_argument(
        "--glob", default=False, help="download pads matching a glob pattern"
    )
    parser.add_argument(
        "--padinfo",
        default=".etherpump/settings.json",
        help="settings, default: .etherpump/settings.json",
    )
    parser.add_argument(
        "--zerorevs",
        default=False,
        action="store_true",
        help="include pads with zero revisions, default: False (i.e. pads with no revisions are skipped)",
    )
    parser.add_argument(
        "--pub",
        default="p",
        help="folder to store files for public pads, default: p",
    )
    parser.add_argument(
        "--group",
        default="g",
        help="folder to store files for group pads, default: g",
    )
    parser.add_argument(
        "--skip",
        default=None,
        type=int,
        help="skip this many items, default: None",
    )
    parser.add_argument(
        "--connection",
        default=200,
        type=int,
        help="number of connections to run concurrently",
    )
    parser.add_argument(
        "--meta",
        default=False,
        action="store_true",
        help="download meta to PADID.meta.json, default: False",
    )
    parser.add_argument(
        "--text",
        default=False,
        action="store_true",
        help="download text to PADID.txt, default: False",
    )
    parser.add_argument(
        "--html",
        default=False,
        action="store_true",
        help="download html to PADID.html, default: False",
    )
    parser.add_argument(
        "--dhtml",
        default=False,
        action="store_true",
        help="download dhtml to PADID.diff.html, default: False",
    )
    parser.add_argument(
        "--all",
        default=False,
        action="store_true",
        help="download all files (meta, text, html, dhtml), default: False",
    )
    parser.add_argument(
        "--folder",
        default=False,
        action="store_true",
        help="dump files in a folder named PADID (meta, text, html, dhtml), default: False",
    )
    parser.add_argument(
        "--output",
        default=False,
        action="store_true",
        help="output changed padids on stdout",
    )
    parser.add_argument(
        "--force",
        default=False,
        action="store_true",
        help="reload, even if revisions count matches previous",
    )
    parser.add_argument(
        "--no-raw-ext",
        default=False,
        action="store_true",
        help="save plain text as padname with no (additional) extension",
    )
    parser.add_argument(
        "--fix-names",
        default=False,
        action="store_true",
        help="normalize padid's (no spaces, special control chars) for use in file names",
    )
    parser.add_argument(
        "--filter-ext", default=None, help="filter pads by extension"
    )
    parser.add_argument(
        "--css",
        default="/styles.css",
        help="add css url to output pages, default: /styles.css",
    )
    parser.add_argument(
        "--script",
        default="/versions.js",
        help="add script url to output pages, default: /versions.js",
    )
    parser.add_argument(
        "--nopublish",
        default="__NOPUBLISH__",
        help="no publish magic word, default: __NOPUBLISH__",
    )
    parser.add_argument(
        "--publish",
        default="__PUBLISH__",
        help="the publish magic word, default: __PUBLISH__",
    )
    parser.add_argument(
        "--publish-opt-in",
        default=False,
        action="store_true",
        help="ensure `--publish` is honoured instead of `--nopublish`",
    )
    return parser
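
# Usage sketch for build_argument_parser() above (hypothetical values):
#
#   parser = build_argument_parser([])
#   args = parser.parse_args(["--text", "--meta", "somepad"])
#   # args.text is True, args.meta is True, args.padid == ["somepad"]
#
# Note that ArgumentParser's first positional parameter is `prog`, so the long
# string passed above becomes the program name shown in --help; description=
# may have been the intent.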


async def get_padids(args, info, data, session):
    if args.padid:
        padids = args.padid
    elif args.glob:
        url = info["localapiurl"] + "listAllPads?" + urlencode(data)
        padids = await agetjson(session, url)
        padids = padids["data"]["padIDs"]
        padids = [x for x in padids if fnmatch(x, args.glob)]
    else:
        url = info["localapiurl"] + "listAllPads?" + urlencode(data)
        padids = await agetjson(session, url)
        padids = padids["data"]["padIDs"]

    padids.sort()
    return padids
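
# Illustration of the --glob filtering in get_padids() above (example values only):
#
#   >>> from fnmatch import fnmatch
#   >>> [x for x in ["notes-1", "notes-2", "scratch"] if fnmatch(x, "notes-*")]
#   ['notes-1', 'notes-2']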


async def handle_pad(args, padid, data, info, session):
    global skipped, saved

    raw_ext = ".raw.txt"
    if args.no_raw_ext:
        raw_ext = ""

    data["padID"] = padid
    p = padpath(padid, args.pub, args.group, args.fix_names)
    if args.folder:
        p = os.path.join(p, padid)

    metapath = p + ".meta.json"
    revisions = None
    tries = 1
    skip = False
    padurlbase = re.sub(r"api/1.2.9/$", "p/", info["apiurl"])
    meta = {}

    while True:
        try:
            if os.path.exists(metapath):
                async with await trio.open_file(metapath) as f:
                    contents = await f.read()
                    meta.update(json.loads(contents))
                url = (
                    info["localapiurl"] + "getRevisionsCount?" + urlencode(data)
                )
                response = await agetjson(session, url)
                revisions = response["data"]["revisions"]
                if meta["revisions"] == revisions and not args.force:
                    skip = True
                    reason = "No new revisions, we already have the latest local copy"
                    break

            meta["padid"] = padid
            versions = meta["versions"] = []
            versions.append(
                {"url": padurlbase + quote(padid), "type": "pad", "code": 200,}
            )

            if revisions is None:
                url = (
                    info["localapiurl"] + "getRevisionsCount?" + urlencode(data)
                )
                response = await agetjson(session, url)
                meta["revisions"] = response["data"]["revisions"]
            else:
                meta["revisions"] = revisions

            if (meta["revisions"] == 0) and (not args.zerorevs):
                skip = True
                reason = "0 revisions, this pad was never edited"
                break

            # todo: load more metadata!
            meta["group"], meta["pad"] = splitpadname(padid)
            meta["pathbase"] = p

            url = info["localapiurl"] + "getLastEdited?" + urlencode(data)
            response = await agetjson(session, url)
            meta["lastedited_raw"] = int(response["data"]["lastEdited"])

            meta["lastedited_iso"] = datetime.fromtimestamp(
                int(meta["lastedited_raw"]) / 1000
            ).isoformat()

            url = info["localapiurl"] + "listAuthorsOfPad?" + urlencode(data)
            response = await agetjson(session, url)
            meta["author_ids"] = response["data"]["authorIDs"]

            break
        except HTTPError as e:
            tries += 1
            if tries > 3:
                print(
                    "Too many failures ({0}), skipping".format(padid),
                    file=sys.stderr,
                )
                skip = True
                reason = "PANIC, couldn't download the pad contents"
                break
            else:
                await trio.sleep(1)
        except TypeError as e:
            print(
                "Type Error loading pad {0} (phantom pad?), skipping".format(
                    padid
                ),
                file=sys.stderr,
            )
            skip = True
            reason = "PANIC, couldn't download the pad contents"
            break

    # Note(decentral1se): cannot track this bug down but basically the `data`
    # and `padid` are getting out of sync and it is ending up that the same pad
    # over and over again is downloaded. This resets things in a way that it
    # works. This is a hack and one day TM I will find out how to fix it proper
    data["padID"] = padid

    if skip:
        print("[ ] {} (skipped, reason: {})".format(padid, reason))
        skipped += 1
        return

    if args.output:
        print(padid)

    if args.all or (args.meta or args.text or args.html or args.dhtml):
        try:
            path = trio.Path(os.path.split(metapath)[0])
            if not os.path.exists(path):
                await path.mkdir()
        except OSError:
            # Note(decentral1se): the path already exists
            pass

    if args.all or args.text:
        url = info["localapiurl"] + "getText?" + urlencode(data)
        text = await agetjson(session, url)
        ver = {"type": "text"}
        versions.append(ver)
        ver["code"] = text["_code"]

        if text["_code"] == 200:
            text = text["data"]["text"]

            ##########################################
            ## ENFORCE __NOPUBLISH__ MAGIC WORD
            ##########################################
            if args.nopublish in text:
                await try_deleting(
                    (
                        p + raw_ext,
                        p + ".raw.html",
                        p + ".diff.html",
                        p + ".meta.json",
                    )
                )
                print(
                    "[ ] {} (deleted, reason: explicit __NOPUBLISH__)".format(
                        padid
                    )
                )
                skipped += 1
                return False

            ##########################################
            ## ENFORCE __PUBLISH__ MAGIC WORD
            ##########################################
            if args.publish_opt_in and args.publish not in text:
                await try_deleting(
                    (
                        p + raw_ext,
                        p + ".raw.html",
                        p + ".diff.html",
                        p + ".meta.json",
                    )
                )
                print("[ ] {} (deleted, reason: publish opt-out)".format(padid))
                skipped += 1
                return False

            ver["path"] = p + raw_ext
            ver["url"] = quote(ver["path"])
            async with await trio.open_file(ver["path"], "w") as f:
                try:
                    # Note(decentral1se): unicode handling...
                    safe_text = text.encode("utf-8", "replace").decode()
                    await f.write(safe_text)
                except Exception as exception:
                    print("PANIC: {}".format(exception))

            # once the content is settled, compute a hash
            # and link it in the metadata!
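            # A minimal sketch of that idea (not wired in; the "sha256" field
            # name is hypothetical):
            #
            #   import hashlib
            #   meta["sha256"] = hashlib.sha256(safe_text.encode("utf-8")).hexdigest()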
    links = []
    if args.css:
        links.append({"href": args.css, "rel": "stylesheet"})
    # todo, make this process reflect which files actually were made
    versionbaseurl = quote(padid)
    links.append(
        {
            "href": versions[0]["url"],
            "rel": "alternate",
            "type": "text/html",
            "title": "Etherpad",
        }
    )
    if args.all or args.text:
        links.append(
            {
                "href": versionbaseurl + raw_ext,
                "rel": "alternate",
                "type": "text/plain",
                "title": "Plain text",
            }
        )
    if args.all or args.html:
        links.append(
            {
                "href": versionbaseurl + ".raw.html",
                "rel": "alternate",
                "type": "text/html",
                "title": "HTML",
            }
        )
    if args.all or args.dhtml:
        links.append(
            {
                "href": versionbaseurl + ".diff.html",
                "rel": "alternate",
                "type": "text/html",
                "title": "HTML with author colors",
            }
        )
    if args.all or args.meta:
        links.append(
            {
                "href": versionbaseurl + ".meta.json",
                "rel": "alternate",
                "type": "application/json",
                "title": "Meta data",
            }
        )

    if args.all or args.dhtml:
        data["startRev"] = "0"
        url = info["localapiurl"] + "createDiffHTML?" + urlencode(data)
        html = await agetjson(session, url)
        ver = {"type": "diffhtml"}
        versions.append(ver)
        ver["code"] = html["_code"]
        if html["_code"] == 200:
            try:
                html = html["data"]["html"]
                ver["path"] = p + ".diff.html"
                ver["url"] = quote(ver["path"])
                doc = html5lib.parse(
                    html, treebuilder="etree", namespaceHTMLElements=False
                )
                html5tidy(
                    doc,
                    indent=True,
                    title=padid,
                    scripts=args.script,
                    links=links,
                )
                async with await trio.open_file(ver["path"], "w") as f:
                    output = ET.tostring(doc, method="html", encoding="unicode")
                    await f.write(output)
            except TypeError:
                ver["message"] = html["message"]

    # Process text, html, dhtml, all options
    if args.all or args.html:
        # mb: line causing the error of not writing the correct HTML content to the correct HTML file:
        # url = info["localapiurl"] + "getHTML?" + urlencode(data)
        # mb: warning, HACK! Catching the error by writing the API request url manually ...
        url = info["localapiurl"] + "getHTML?" + "padID=" + padid + "&" + "apikey=" + data["apikey"] + "&startRev=0"
        # print(url)
        html = await agetjson(session, url)
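        # A possibly cleaner variant of the manual URL above (untested sketch,
        # not what runs here): rebuild the same query with urlencode so padID
        # and apikey are escaped consistently:
        #
        #   url = info["localapiurl"] + "getHTML?" + urlencode(dict(data, startRev="0"))
        #
        # The concatenation above is kept as-is since it is what was verified
        # against the running Etherpad instance.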
|
|
        ver = {"type": "html"}
        versions.append(ver)
        # mb: warning, HACK! Added a Try and Except here, as it sometimes bumped into an error, stopping the pull.
        try:
            ver["code"] = html["_code"]
            if html["_code"] == 200:
                try:
                    html = html["data"]["html"]
                    ver["path"] = p + ".raw.html"
                    ver["url"] = quote(ver["path"])
                    doc = html5lib.parse(
                        html, treebuilder="etree", namespaceHTMLElements=False
                    )
                    html5tidy(
                        doc, indent=True, title=padid, scripts=args.script, links=links,
                    )
                    async with await trio.open_file(ver["path"], "w") as f:
                        output = ET.tostring(doc, method="html", encoding="unicode")
                        await f.write(output)
                except TypeError:
                    ver["message"] = html["message"]
        except Exception as exception:
            print("PANIC: {}".format(exception))

    # output meta
    if args.all or args.meta:
        ver = {"type": "meta"}
        versions.append(ver)
        ver["path"] = metapath
        ver["url"] = quote(metapath)
        async with await trio.open_file(metapath, "w") as f:
            await f.write(json.dumps(meta))

    print("[x] {} (saved)".format(padid))
    saved += 1
    return


async def handle_pads(args):
    global skipped, saved

    session = asks.Session(connections=args.connection)
    info = loadpadinfo(args.padinfo)
    data = {"apikey": info["apikey"]}

    padids = await get_padids(args, info, data, session)
    if args.skip:
        padids = padids[args.skip : len(padids)]

    print("=" * 79)
    print("Etherpump is warming up the engines ...")
    print("=" * 79)

    start = time.time()
    async with trio.open_nursery() as nursery:
        for padid in padids:
            nursery.start_soon(handle_pad, args, padid, data, info, session)
    end = time.time()
    timeit = round(end - start, 2)

    print("=" * 79)
    print(
        "Processed {} :: Skipped {} :: Saved {} :: Time {}s".format(
            len(padids), skipped, saved, timeit
        )
    )
    print("=" * 79)


def main(args):
    p = build_argument_parser(args)
    args = p.parse_args(args)
    trio.run(handle_pads, args)