Browse Source

Updated the help strings of the commands

add-quote-import
Michael Murtaugh 8 years ago
parent
commit
cd207dc8f8
  1. 27
      bin/etherdump
  2. 2
      etherdump/commands/creatediffhtml.py
  3. 30
      etherdump/commands/diffhtml.py
  4. 3
      etherdump/commands/dumpcsv.py
  5. 2
      etherdump/commands/gethtml.py
  6. 2
      etherdump/commands/gettext.py
  7. 2
      etherdump/commands/list.py
  8. 2
      etherdump/commands/listauthors.py
  9. 2
      etherdump/commands/revisionscount.py
  10. 2
      etherdump/commands/showmeta.py
  11. 16
      etherdump/commands/sync.py

27
bin/etherdump

@ -2,6 +2,25 @@
import sys
usage = """Usage:
etherdump CMD
where CMD could be:
sync
gettext
gethtml
creatediffhtml
dumpcsv
list
listauthors
revisionscount
showmeta
For more information on each command try:
etherdump CMD --help
"""
try:
cmd = sys.argv[1]
if cmd.startswith("-"):
@ -10,13 +29,13 @@ try:
else:
args = sys.argv[2:]
except IndexError:
cmd = "sync"
args = sys.argv[1:]
print usage
sys.exit(0)
try:
# http://stackoverflow.com/questions/301134/dynamic-module-import-in-python
cmdmod = __import__("etherdump.commands.%s" % cmd, fromlist=["etherdump.commands"])
cmdmod.main(args)
except ImportError, e:
print "Command '{0}' not available ({1})".format(cmd, e)
print "Error performing command '{0}'\n(python said: {1})\n".format(cmd, e)
print usage

2
etherdump/commands/creatediffhtml.py

@ -7,7 +7,7 @@ from urllib2 import urlopen, HTTPError, URLError
def main(args):
p = ArgumentParser("")
p = ArgumentParser("calls the createDiffHTML API function for the given padid")
p.add_argument("padid", help="the padid")
p.add_argument("--padinfo", default="padinfo.json", help="padinfo, default: padinfo.json")
p.add_argument("--showurl", default=False, action="store_true")

30
etherdump/commands/diffhtml.py

@ -1,30 +0,0 @@
#!/usr/bin/env python
from argparse import ArgumentParser
import json
from urllib import urlencode
from urllib2 import urlopen, HTTPError, URLError
def main(args):
p = ArgumentParser("")
p.add_argument("padid", help="the padid")
p.add_argument("--startrev", type=int, default=0, help="starting revision")
p.add_argument("--endrev", type=int, default=None, help="ending revision, default: last")
p.add_argument("--padinfo", default="padinfo.json", help="padinfo, default: padinfo.json")
p.add_argument("--showurl", default=False, action="store_true")
args = p.parse_args(args)
with open(args.padinfo) as f:
info = json.load(f)
apiurl = "{0[protocol]}://{0[hostname]}:{0[port]}{0[apiurl]}{0[apiversion]}/".format(info)
data = {}
data['apikey'] = info['apikey']
data['padID'] = args.padid.encode("utf-8")
data['startRev'] = "{0}".format(args.startrev)
if args.endrev != None:
data['endRev'] = "{0}".format(args.endrev)
requesturl = apiurl+'createDiffHTML?'+urlencode(data)
if args.showurl:
print requesturl
else:
print json.load(urlopen(requesturl))['data']['html']

3
etherdump/commands/dumpcsv.py

@ -30,9 +30,8 @@ def jsonload (url):
return json.loads(data)
def main (args):
p = ArgumentParser("")
p = ArgumentParser("outputs a CSV of information all all pads")
p.add_argument("--padinfo", default="padinfo.json", help="padinfo, default: padinfo.json")
p.add_argument("--format", default="csv", help="output format: csv (default), json")
p.add_argument("--zerorevs", default=False, action="store_true", help="include pads with zero revisions, default: False")
args = p.parse_args(args)

2
etherdump/commands/gethtml.py

@ -7,7 +7,7 @@ from urllib2 import urlopen, HTTPError, URLError
def main(args):
p = ArgumentParser("")
p = ArgumentParser("calls the getHTML API function for the given padid")
p.add_argument("padid", help="the padid")
p.add_argument("--padinfo", default="padinfo.json", help="padinfo, default: padinfo.json")
p.add_argument("--showurl", default=False, action="store_true")

2
etherdump/commands/gettext.py

@ -7,7 +7,7 @@ from urllib2 import urlopen, HTTPError, URLError
def main(args):
p = ArgumentParser("")
p = ArgumentParser("calls the getText API function for the given padid")
p.add_argument("padid", help="the padid")
p.add_argument("--padinfo", default="padinfo.json", help="padinfo, default: padinfo.json")
p.add_argument("--showurl", default=False, action="store_true")

2
etherdump/commands/list.py

@ -6,7 +6,7 @@ from urllib import urlencode
from urllib2 import urlopen, HTTPError, URLError
def main (args):
p = ArgumentParser("")
p = ArgumentParser("call listAllPads and print the results")
p.add_argument("--padinfo", default="padinfo.json", help="padinfo, default: padinfo.json")
p.add_argument("--showurl", default=False, action="store_true")
p.add_argument("--format", default="lines", help="output format: lines, json; default lines")

2
etherdump/commands/listauthors.py

@ -7,7 +7,7 @@ from urllib2 import urlopen, HTTPError, URLError
def main(args):
p = ArgumentParser("")
p = ArgumentParser("call listAuthorsOfPad for the padid")
p.add_argument("padid", help="the padid")
p.add_argument("--padinfo", default="padinfo.json", help="padinfo, default: padinfo.json")
p.add_argument("--showurl", default=False, action="store_true")

2
etherdump/commands/revisionscount.py

@ -6,7 +6,7 @@ from urllib import urlencode
from urllib2 import urlopen, HTTPError, URLError
def main(args):
p = ArgumentParser("")
p = ArgumentParser("call getRevisionsCount for the given padid")
p.add_argument("padid", help="the padid")
p.add_argument("--padinfo", default="padinfo.json", help="padinfo, default: padinfo.json")
p.add_argument("--showurl", default=False, action="store_true")

2
etherdump/commands/showmeta.py

@ -9,7 +9,7 @@ Extract and output selected fields of metadata
"""
def main (args):
p = ArgumentParser("")
p = ArgumentParser("extract & display meta data from a specific .meta.json file, or for a given padid (nb: it still looks for a .meta.json file)")
p.add_argument("--path", default=None, help="read from a meta.json file")
p.add_argument("--padid", default=None, help="read meta for this padid")
p.add_argument("--format", default="{padid}", help="format str, default: {padid}")

16
etherdump/commands/sync.py

@ -29,17 +29,17 @@ def load_padinfo(p):
def main (args):
p = ArgumentParser("")
p = ArgumentParser("Check for pads that have changed since last sync (according to .meta.json)")
p.add_argument("padid", nargs="*", default=[])
p.add_argument("--padinfo", default="padinfo.json", help="padinfo, default: padinfo.json")
p.add_argument("--zerorevs", default=False, action="store_true", help="include pads with zero revisions, default: False")
p.add_argument("--pub", default="pub", help="pub path for output, default: pub")
p.add_argument("--group", default="g", help="group path for output, default: g")
p.add_argument("--zerorevs", default=False, action="store_true", help="include pads with zero revisions, default: False (i.e. pads with no revisions are skipped)")
p.add_argument("--pub", default="pub", help="folder to store files for public pads, default: pub")
p.add_argument("--group", default="g", help="folder to store files for group pads, default: g")
p.add_argument("--skip", default=None, type=int, help="skip this many items, default: None")
p.add_argument("--meta", default=False, action="store_true", help="download meta to file, default: False")
p.add_argument("--text", default=False, action="store_true", help="download text to file, default: False")
p.add_argument("--html", default=False, action="store_true", help="download html to file, default: False")
p.add_argument("--dhtml", default=False, action="store_true", help="download dhtml to file, default: False")
p.add_argument("--meta", default=False, action="store_true", help="download meta to PADID.meta.json, default: False")
p.add_argument("--text", default=False, action="store_true", help="download text to PADID.txt, default: False")
p.add_argument("--html", default=False, action="store_true", help="download html to PADID.html, default: False")
p.add_argument("--dhtml", default=False, action="store_true", help="download dhtml to PADID.dhtml, default: False")
p.add_argument("--all", default=False, action="store_true", help="download all files (meta, text, html, dhtml), default: False")
args = p.parse_args(args)

Loading…
Cancel
Save