Migrate to Python 3

Luke Murphy 2019-09-25 18:52:52 +02:00
parent d978c942f0
commit 6fd24eb6cb
No known key found for this signature in database
GPG Key ID: 5E2EF5A63E3718CC
20 changed files with 95 additions and 78 deletions

View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python
from __future__ import print_function
from argparse import ArgumentParser
import json, os

View File

@@ -1,15 +1,16 @@
from __future__ import print_function
import re, os, json, sys
from math import ceil, floor
from time import sleep
try:
# python2
from urlparse import urlparse, urlunparse
from urllib2 import urlopen, URLError, HTTPError
from urllib import urlencode
from urllib import quote_plus, unquote_plus
from htmlentitydefs import name2codepoint
from urllib.parse import urlparse, urlunparse
from urllib.request import urlopen
from urllib.error import URLError, HTTPError
from urllib.parse import urlencode
from urllib.parse import quote_plus, unquote_plus
from html.entities import name2codepoint
input = raw_input
except ImportError:
@@ -24,12 +25,12 @@ def splitpadname (padid):
if m:
return(m.group(1), padid[m.end():])
else:
return (u"", padid)
return ("", padid)
def padurl (padid, ):
return padid
def padpath (padid, pub_path=u"", group_path=u"", normalize=False):
def padpath (padid, pub_path="", group_path="", normalize=False):
g, p = splitpadname(padid)
# if type(g) == unicode:
# g = g.encode("utf-8")
@@ -48,7 +49,7 @@ def padpath (padid, pub_path=u"", group_path=u"", normalize=False):
return os.path.join(pub_path, p)
def padpath2id (path):
if type(path) == unicode:
if type(path) == str:
path = path.encode("utf-8")
dd, p = os.path.split(path)
gname = dd.split("/")[-1]
@@ -95,7 +96,7 @@ def progressbar (i, num, label="", file=sys.stderr):
percentage = int(floor(p*100))
bars = int(ceil(p*20))
bar = ("*"*bars) + ("-"*(20-bars))
msg = u"\r{0} {1}/{2} {3}... ".format(bar, (i+1), num, label)
msg = "\r{0} {1}/{2} {3}... ".format(bar, (i+1), num, label)
sys.stderr.write(msg)
sys.stderr.flush()
@@ -114,15 +115,15 @@ def unescape(text):
# character reference
try:
if text[:3] == "&#x":
return unichr(int(text[3:-1], 16))
return chr(int(text[3:-1], 16))
else:
return unichr(int(text[2:-1]))
return chr(int(text[2:-1]))
except ValueError:
pass
else:
# named entity
try:
text = unichr(name2codepoint[text[1:-1]])
text = chr(name2codepoint[text[1:-1]])
except KeyError:
pass
return text # leave as is
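
The hunks above drop the u"..." prefixes (every Python 3 str is already Unicode) and replace unichr()/htmlentitydefs with chr()/html.entities. A minimal, self-contained sketch of the migrated entity handling, assuming the re.sub-driven unescape logic shown here; the names unescape_sketch and fixup are illustrative, not the project's:

import re
from html.entities import name2codepoint

def unescape_sketch(text):
    def fixup(m):
        ent = m.group(0)
        if ent[:2] == "&#":
            # numeric character reference, e.g. &#38; or &#x26;
            try:
                if ent[:3] == "&#x":
                    return chr(int(ent[3:-1], 16))
                return chr(int(ent[2:-1]))
            except ValueError:
                return ent
        # named entity, e.g. &eacute;
        try:
            return chr(name2codepoint[ent[1:-1]])
        except KeyError:
            return ent
    return re.sub(r"&#?\w+;", fixup, text)

print(unescape_sketch("caf&eacute; &#38; co"))  # café & co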

View File

@@ -1,8 +1,9 @@
from __future__ import print_function
from argparse import ArgumentParser
import json
from urllib import urlencode
from urllib2 import urlopen, HTTPError, URLError
from urllib.parse import urlencode
from urllib.request import urlopen
from urllib.error import HTTPError, URLError
def main(args):
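
This file swaps the Python 2 urllib/urllib2 imports for their Python 3 homes in urllib.parse, urllib.request and urllib.error; the same substitution recurs in most of the files below. A hedged sketch of those imports in use against the Etherpad HTTP API; the apiurl, apikey and padID values are placeholders, and only the import locations plus the bytes-to-str decode matter here:

import json
from urllib.parse import urlencode
from urllib.request import urlopen
from urllib.error import HTTPError, URLError

apiurl = "http://localhost:9001/api/1.2.9/"           # placeholder
data = {"apikey": "SECRET", "padID": "example-pad"}    # placeholder values
try:
    resp = urlopen(apiurl + "getRevisionsCount?" + urlencode(data))
    # urlopen() returns bytes on Python 3, so decode before parsing
    print(json.loads(resp.read().decode("utf-8")))
except (HTTPError, URLError) as e:
    print("request failed:", e)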

View File

@@ -1,8 +1,9 @@
from __future__ import print_function
from argparse import ArgumentParser
import json
from urllib import urlencode
from urllib2 import urlopen, HTTPError, URLError
from urllib.parse import urlencode
from urllib.request import urlopen
from urllib.error import HTTPError, URLError
def main(args):

View File

@@ -1,9 +1,10 @@
from __future__ import print_function
from argparse import ArgumentParser
import sys, json, re
from datetime import datetime
from urllib import urlencode
from urllib2 import urlopen, HTTPError, URLError
from urllib.parse import urlencode
from urllib.request import urlopen
from urllib.error import HTTPError, URLError
from csv import writer
from math import ceil, floor
@@ -52,7 +53,7 @@ def main (args):
percentage = int(floor(p*100))
bars = int(ceil(p*20))
bar = ("*"*bars) + ("-"*(20-bars))
msg = u"\r{0} {1}/{2} {3}... ".format(bar, (i+1), numpads, padid)
msg = "\r{0} {1}/{2} {3}... ".format(bar, (i+1), numpads, padid)
if len(msg) > maxmsglen:
maxmsglen = len(msg)
sys.stderr.write("\r{0}".format(" "*maxmsglen))
@@ -63,7 +64,7 @@ def main (args):
groupname = m.group(1)
padidnogroup = padid[m.end():]
else:
groupname = u""
groupname = ""
padidnogroup = padid
data['padID'] = padid.encode("utf-8")
@@ -75,7 +76,7 @@ def main (args):
lastedited_raw = jsonload(apiurl+'getLastEdited?'+urlencode(data))['data']['lastEdited']
lastedited_iso = datetime.fromtimestamp(int(lastedited_raw)/1000).isoformat()
author_ids = jsonload(apiurl+'listAuthorsOfPad?'+urlencode(data))['data']['authorIDs']
author_ids = u" ".join(author_ids).encode("utf-8")
author_ids = " ".join(author_ids).encode("utf-8")
out.writerow((padidnogroup.encode("utf-8"), groupname.encode("utf-8"), revisions, lastedited_iso, author_ids))
count += 1
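
One behaviour worth flagging around the writerow() call above: Python 3's csv module deals in text, so values that are still run through .encode("utf-8") end up written literally as b'...'. A small stand-alone demonstration, not project code:

import csv, io

buf = io.StringIO()
out = csv.writer(buf)
out.writerow(["pad".encode("utf-8"), "group"])   # bytes shows up as b'pad'
out.writerow(["pad", "group"])                   # plain str is what csv expects
print(buf.getvalue())
# b'pad',group
# pad,group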

View File

@@ -1,8 +1,9 @@
from __future__ import print_function
from argparse import ArgumentParser
import json
from urllib import urlencode
from urllib2 import urlopen, HTTPError, URLError
from urllib.parse import urlencode
from urllib.request import urlopen
from urllib.error import HTTPError, URLError
def main(args):

View File

@@ -1,10 +1,11 @@
from __future__ import print_function
from argparse import ArgumentParser
import json, sys
try:
# python2
from urllib2 import urlopen, URLError, HTTPError
from urllib import urlencode
from urllib.request import urlopen
from urllib.error import URLError, HTTPError
from urllib.parse import urlencode
except ImportError:
# python3
from urllib.parse import urlencode
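
Several modules keep a try/except ImportError guard around these imports so the code can still load under either interpreter. A generic sketch of that idiom with the Python 3 locations tried first; the ordering and comments in the actual files differ:

try:
    # Python 3 locations
    from urllib.parse import urlencode, quote
    from urllib.request import urlopen
    from urllib.error import URLError, HTTPError
except ImportError:
    # Python 2 fallback
    from urllib import urlencode, quote
    from urllib2 import urlopen, URLError, HTTPError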

View File

@@ -1,6 +1,6 @@
#!/usr/bin/env python3
from __future__ import print_function
from html5lib import parse
import os, sys
from argparse import ArgumentParser

View File

@@ -1,4 +1,4 @@
from __future__ import print_function
from argparse import ArgumentParser
import sys, json, re, os, time
from datetime import datetime
@@ -6,9 +6,10 @@ import dateutil.parser
try:
# python2
from urllib2 import urlopen, URLError, HTTPError
from urllib import urlencode
from urlparse import urlparse, urlunparse
from urllib.request import urlopen
from urllib.error import URLError, HTTPError
from urllib.parse import urlencode
from urllib.parse import urlparse, urlunparse
except ImportError:
# python3
from urllib.parse import urlparse, urlunparse, urlencode, quote
@@ -182,9 +183,9 @@ def main (args):
padmeta["lastedited_822"] = d.strftime("%a, %d %b %Y %H:%M:%S +0000")
return padmeta
pads = map(loadmeta, inputs)
pads = list(map(loadmeta, inputs))
pads = [x for x in pads if x != None]
pads = map(fixdates, pads)
pads = list(map(fixdates, pads))
args.pads = list(pads)
def could_have_base (x, y):
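
The list(map(...)) wrappers are needed because Python 3's map() returns a lazy, one-shot iterator rather than a list, so the later filtering and reuse of pads would otherwise see an already-exhausted sequence. A two-line illustration:

pads = map(str.strip, [" a ", " b "])
print(list(pads))   # ['a', 'b']
print(list(pads))   # [] (the iterator is already exhausted)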

View File

@@ -1,11 +1,12 @@
from __future__ import print_function
from argparse import ArgumentParser
try:
# python2
from urlparse import urlparse, urlunparse
from urllib2 import urlopen, URLError, HTTPError
from urllib import urlencode
from urllib.parse import urlparse, urlunparse
from urllib.request import urlopen
from urllib.error import URLError, HTTPError
from urllib.parse import urlencode
input = raw_input
except ImportError:
# python3
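
The input = raw_input alias only makes sense under Python 2, where bare input() evaluates whatever the user types; Python 3's built-in input() already returns a plain string. The usual guarded spelling looks like this (a sketch, not the exact code in these files):

try:
    input = raw_input   # Python 2: raw_input() returns a plain string
except NameError:
    pass                # Python 3: built-in input() already does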

View File

@@ -1,8 +1,9 @@
from __future__ import print_function
from argparse import ArgumentParser
import json, os, re
from urllib import urlencode
from urllib2 import urlopen, HTTPError, URLError
from urllib.parse import urlencode
from urllib.request import urlopen
from urllib.error import HTTPError, URLError
def group (items, key=lambda x: x):
ret = []

View File

@@ -1,13 +1,14 @@
from __future__ import print_function
from argparse import ArgumentParser
import json
import sys
from etherpump.commands.common import getjson
try:
# python2
from urlparse import urlparse, urlunparse
from urllib2 import urlopen, URLError, HTTPError
from urllib import urlencode
from urllib.parse import urlparse, urlunparse
from urllib.request import urlopen
from urllib.error import URLError, HTTPError
from urllib.parse import urlencode
input = raw_input
except ImportError:
# python3

View File

@@ -1,8 +1,9 @@
from __future__ import print_function
from argparse import ArgumentParser
import json
from urllib import urlencode
from urllib2 import urlopen, HTTPError, URLError
from urllib.parse import urlencode
from urllib.request import urlopen
from urllib.error import HTTPError, URLError
def main(args):

View File

@@ -1,4 +1,4 @@
from __future__ import print_function
from argparse import ArgumentParser
import sys, json, re, os, time
from datetime import datetime
@@ -7,9 +7,10 @@ import pypandoc
try:
# python2
from urllib2 import urlopen, URLError, HTTPError
from urllib import urlencode
from urlparse import urlparse, urlunparse
from urllib.request import urlopen
from urllib.error import URLError, HTTPError
from urllib.parse import urlencode
from urllib.parse import urlparse, urlunparse
except ImportError:
# python3
from urllib.parse import urlparse, urlunparse, urlencode, quote
@@ -183,9 +184,9 @@ def main (args):
padmeta["lastedited_822"] = d.strftime("%a, %d %b %Y %H:%M:%S +0000")
return padmeta
pads = map(loadmeta, inputs)
pads = list(map(loadmeta, inputs))
pads = [x for x in pads if x != None]
pads = map(fixdates, pads)
pads = list(map(fixdates, pads))
args.pads = list(pads)
def could_have_base (x, y):

View File

@@ -1,12 +1,13 @@
from __future__ import print_function
from argparse import ArgumentParser
import sys, json, re, os
from datetime import datetime
try:
# python2
from urllib2 import urlopen, URLError, HTTPError
from urllib import urlencode
from urllib.request import urlopen
from urllib.error import URLError, HTTPError
from urllib.parse import urlencode
except ImportError:
# python3
from urllib.parse import urlencode, quote

View File

@@ -1,8 +1,9 @@
from __future__ import print_function
from argparse import ArgumentParser
import json
from urllib import urlencode
from urllib2 import urlopen, HTTPError, URLError
from urllib.parse import urlencode
from urllib.request import urlopen
from urllib.error import HTTPError, URLError
def main(args):
p = ArgumentParser("call getRevisionsCount for the given padid")

View File

@@ -1,8 +1,9 @@
from __future__ import print_function
from argparse import ArgumentParser
import json, sys
from urllib import urlencode
from urllib2 import urlopen, HTTPError, URLError
from urllib.parse import urlencode
from urllib.request import urlopen
from urllib.error import HTTPError, URLError
import requests

View File

@@ -1,11 +1,12 @@
from __future__ import print_function
from argparse import ArgumentParser
import json, sys
try:
# python2
from urllib2 import urlopen, URLError, HTTPError
from urllib import urlencode
from urllib.request import urlopen
from urllib.error import URLError, HTTPError
from urllib.parse import urlencode
except ImportError:
# python3
from urllib.parse import urlencode, quote

View File

@@ -1,7 +1,7 @@
from __future__ import print_function
from argparse import ArgumentParser
import json, sys, re
from common import *
from .common import *
"""
Extract and output selected fields of metadata
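
"from common import *" stops working here because Python 3 removed implicit relative imports; inside a package a sibling module needs an explicit leading dot. For illustration, the absolute form below uses the etherpump.commands package path that appears earlier in this commit:

# inside the package, pick one form:
from .common import *                       # explicit relative import
# from etherpump.commands.common import *   # absolute equivalent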

View File

@@ -1,11 +1,12 @@
from __future__ import print_function
from argparse import ArgumentParser
import sys, json, re, os
from datetime import datetime
from urllib import urlencode
from urllib2 import urlopen, HTTPError, URLError
from urllib.parse import urlencode
from urllib.request import urlopen
from urllib.error import HTTPError, URLError
from math import ceil, floor
from common import *
from .common import *
"""
status (meta):
@@ -95,7 +96,7 @@ def main (args):
pad = PadItem(path=p)
padsbypath[pad.path] = pad
pads = padsbypath.values()
pads = list(padsbypath.values())
pads.sort(key=lambda x: (x.status, x.padid))
curstat = None
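
The list(...) around padsbypath.values() is required because dict.values() returns a view object on Python 3, and views cannot be sorted in place. A quick illustration with made-up pad paths:

padsbypath = {"pub/a": 2, "pub/b": 1}
pads = list(padsbypath.values())   # a real list, so sort() works
pads.sort()
print(pads)   # [1, 2]
# padsbypath.values().sort() would raise AttributeError on Python 3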