Browse Source

changes

add-quote-import
Michael Murtaugh 9 years ago
parent
commit
81e761c403
  1. 36
      etherdump/commands/common.py
  2. 46
      etherdump/commands/pull.py

36
etherdump/commands/common.py

@ -1,6 +1,7 @@
import re, os import re, os, json, sys
from urllib import quote_plus, unquote_plus from urllib import quote_plus, unquote_plus
from math import ceil, floor
from urllib2 import urlopen
groupnamepat = re.compile(r"^g\.(\w+)\$") groupnamepat = re.compile(r"^g\.(\w+)\$")
def splitpadname (padid): def splitpadname (padid):
@ -23,3 +24,34 @@ def padpath (padid, pub_path=u"", group_path=u""):
return os.path.join(group_path, g, p) return os.path.join(group_path, g, p)
else: else:
return os.path.join(pub_path, p) return os.path.join(pub_path, p)
def padpath2id (path):
    """Invert padpath: map a filesystem path back to an Etherpad pad id.

    The last path component is the quote_plus-escaped pad name; the
    directory directly above it (when non-empty) is the group name,
    rejoined with the "group$pad" separator.  Returns unicode.
    """
    # normalise to a utf-8 byte string before unquoting (Python 2)
    if isinstance(path, unicode):
        path = path.encode("utf-8")
    dirname, name = os.path.split(path)
    group = dirname.split("/")[-1]
    name = unquote_plus(name)
    if not group:
        return name.decode("utf-8")
    return "{0}${1}".format(group, name).decode("utf-8")
def getjson (url):
    """Fetch *url* and return its body parsed as JSON.

    Used for all Etherpad HTTP API calls.

    Parameters:
        url: full request URL (apiurl + method + urlencoded query).
    Returns:
        the decoded JSON value (typically a dict).
    Raises:
        whatever urlopen raises on network/HTTP failure, and
        ValueError if the body is not valid JSON.
    """
    f = urlopen(url)
    try:
        data = f.read()
    finally:
        # fix: always release the connection, even if read() raises
        f.close()
    return json.loads(data)
def loadpadinfo(p):
    """Load the etherdump settings file at path *p* and return its contents.

    The file holds JSON; callers read keys such as 'apikey' and 'apiurl'
    from the returned dict.
    """
    with open(p) as settings:
        return json.load(settings)
def progressbar (i, num, label="", file=sys.stderr):
    """Redraw a 20-character in-place progress bar for item *i* of *num*.

    Parameters:
        i (int): zero-based index of the current item.
        num (int): total number of items (must be non-zero).
        label (unicode/str): text shown after the "i/num" counter.
        file: writable stream to draw on; defaults to sys.stderr.
    The line starts with a carriage return so successive calls overwrite
    each other; output is utf-8 encoded bytes, matching how the rest of
    etherdump writes to stderr.
    """
    p = float(i) / num
    bars = int(ceil(p*20))
    bar = ("*"*bars) + ("-"*(20-bars))
    msg = u"\r{0} {1}/{2} {3}... ".format(bar, (i+1), num, label)
    # fix: honour the `file` argument instead of always writing to
    # sys.stderr (the parameter was previously ignored); also dropped
    # the unused `percentage` local.
    file.write(msg.encode("utf-8"))
    file.flush()

46
etherdump/commands/sync.py → etherdump/commands/pull.py

@ -4,29 +4,16 @@ from argparse import ArgumentParser
import sys, json, re, os import sys, json, re, os
from datetime import datetime from datetime import datetime
from urllib import urlencode from urllib import urlencode
from urllib2 import urlopen, HTTPError, URLError from urllib2 import HTTPError
from math import ceil, floor
from common import * from common import *
""" """
sync(meta): pull(meta):
Update meta data files for those that have changed. Update meta data files for those that have changed.
Check for changed pads by looking at revisions & comparing to existing Check for changed pads by looking at revisions & comparing to existing
""" """
def jsonload (url):
f = urlopen(url)
data = f.read()
f.close()
return json.loads(data)
def load_padinfo(p):
with open(p) as f:
info = json.load(f)
return info
def main (args): def main (args):
p = ArgumentParser("Check for pads that have changed since last sync (according to .meta.json)") p = ArgumentParser("Check for pads that have changed since last sync (according to .meta.json)")
p.add_argument("padid", nargs="*", default=[]) p.add_argument("padid", nargs="*", default=[])
@ -42,14 +29,14 @@ def main (args):
p.add_argument("--all", default=False, action="store_true", help="download all files (meta, text, html, dhtml), default: False") p.add_argument("--all", default=False, action="store_true", help="download all files (meta, text, html, dhtml), default: False")
args = p.parse_args(args) args = p.parse_args(args)
info = load_padinfo(args.padinfo) info = loadpadinfo(args.padinfo)
data = {} data = {}
data['apikey'] = info['apikey'] data['apikey'] = info['apikey']
if args.padid: if args.padid:
padids = args.padid padids = args.padid
else: else:
padids = jsonload(info['apiurl']+'listAllPads?'+urlencode(data))['data']['padIDs'] padids = getjson(info['apiurl']+'listAllPads?'+urlencode(data))['data']['padIDs']
padids.sort() padids.sort()
numpads = len(padids) numpads = len(padids)
# maxmsglen = 0 # maxmsglen = 0
@ -57,16 +44,7 @@ def main (args):
for i, padid in enumerate(padids): for i, padid in enumerate(padids):
if args.skip != None and i<args.skip: if args.skip != None and i<args.skip:
continue continue
p = (float(i) / numpads) progressbar(i, numpads, padid)
percentage = int(floor(p*100))
bars = int(ceil(p*20))
bar = ("*"*bars) + ("-"*(20-bars))
msg = u"\r{0} {1}/{2} {3}... ".format(bar, (i+1), numpads, padid)
# if len(msg) > maxmsglen:
# maxmsglen = len(msg)
# sys.stderr.write("\r{0}".format(" "*maxmsglen))
sys.stderr.write(msg.encode("utf-8"))
sys.stderr.flush()
data['padID'] = padid.encode("utf-8") data['padID'] = padid.encode("utf-8")
p = padpath(padid, args.pub, args.group) p = padpath(padid, args.pub, args.group)
@ -79,14 +57,14 @@ def main (args):
if os.path.exists(metapath): if os.path.exists(metapath):
with open(metapath) as f: with open(metapath) as f:
meta = json.load(f) meta = json.load(f)
revisions = jsonload(info['apiurl']+'getRevisionsCount?'+urlencode(data))['data']['revisions'] revisions = getjson(info['apiurl']+'getRevisionsCount?'+urlencode(data))['data']['revisions']
if meta['revisions'] == revisions: if meta['revisions'] == revisions:
skip=True skip=True
break break
meta = {'padid': padid.encode("utf-8")} meta = {'padid': padid.encode("utf-8")}
if revisions == None: if revisions == None:
meta['revisions'] = jsonload(info['apiurl']+'getRevisionsCount?'+urlencode(data))['data']['revisions'] meta['revisions'] = getjson(info['apiurl']+'getRevisionsCount?'+urlencode(data))['data']['revisions']
else: else:
meta['revisions' ] = revisions meta['revisions' ] = revisions
@ -98,9 +76,9 @@ def main (args):
# todo: load more metadata! # todo: load more metadata!
meta['pad'], meta['group'] = splitpadname(padid) meta['pad'], meta['group'] = splitpadname(padid)
meta['pathbase'] = p meta['pathbase'] = p
meta['lastedited_raw'] = int(jsonload(info['apiurl']+'getLastEdited?'+urlencode(data))['data']['lastEdited']) meta['lastedited_raw'] = int(getjson(info['apiurl']+'getLastEdited?'+urlencode(data))['data']['lastEdited'])
meta['lastedited_iso'] = datetime.fromtimestamp(int(meta['lastedited_raw'])/1000).isoformat() meta['lastedited_iso'] = datetime.fromtimestamp(int(meta['lastedited_raw'])/1000).isoformat()
meta['author_ids'] = jsonload(info['apiurl']+'listAuthorsOfPad?'+urlencode(data))['data']['authorIDs'] meta['author_ids'] = getjson(info['apiurl']+'listAuthorsOfPad?'+urlencode(data))['data']['authorIDs']
break break
except HTTPError as e: except HTTPError as e:
tries += 1 tries += 1
@ -128,20 +106,20 @@ def main (args):
# Process text, html, dhtml, all options # Process text, html, dhtml, all options
if args.all or args.text: if args.all or args.text:
text = jsonload(info['apiurl']+'getText?'+urlencode(data)) text = getjson(info['apiurl']+'getText?'+urlencode(data))
text = text['data']['text'] text = text['data']['text']
with open(p+".txt", "w") as f: with open(p+".txt", "w") as f:
f.write(text.encode("utf-8")) f.write(text.encode("utf-8"))
if args.all or args.html: if args.all or args.html:
html = jsonload(info['apiurl']+'getHTML?'+urlencode(data)) html = getjson(info['apiurl']+'getHTML?'+urlencode(data))
html = html['data']['html'] html = html['data']['html']
with open(p+".html", "w") as f: with open(p+".html", "w") as f:
f.write(html.encode("utf-8")) f.write(html.encode("utf-8"))
if args.all or args.dhtml: if args.all or args.dhtml:
data['startRev'] = "0" data['startRev'] = "0"
html = jsonload(info['apiurl']+'createDiffHTML?'+urlencode(data)) html = getjson(info['apiurl']+'createDiffHTML?'+urlencode(data))
html = html['data']['html'] html = html['data']['html']
with open(p+".diff.html", "w") as f: with open(p+".diff.html", "w") as f:
f.write(html.encode("utf-8")) f.write(html.encode("utf-8"))
Loading…
Cancel
Save