WIP: Python 3 integration branch #2888

Closed · wants to merge 48 commits
48 commits
d46bd94
from web.browser import AppBrowser
cclauss Jan 20, 2020
6497b0a
from web.browser import AppBrowser
cclauss Jan 20, 2020
07f7dbb
from openlibrary.catalog.marc import mnemonics
cclauss Jan 20, 2020
9279fe6
from openlibrary.core import formats
cclauss Jan 20, 2020
7d37ee3
from openlibrary.core import helpers
cclauss Jan 20, 2020
c819b29
from openlibrary.core.readableurls import ReadableUrlProcessor
cclauss Jan 20, 2020
c880526
from openlibrary.core import cache, geo_ip, iprange
cclauss Jan 20, 2020
bb0b80f
from openlibrary.core.processors.readableurls import ReadableUrlProce…
cclauss Jan 20, 2020
98bd644
from openlibrary.catalog.marc.marc_base import MarcBase, MarcExceptio…
cclauss Jan 20, 2020
8e361c9
from six import StringIO
cclauss Jan 20, 2020
721b4ad
from openlibrary.catalog.marc.get_subjects import subjects_for_work
cclauss Jan 20, 2020
7375c7c
from openlibrary.catalog.marc.marc_base import MarcBase, MarcException
cclauss Jan 20, 2020
a489a1a
from openlibrary.catalog.merge.normalize import normalize
cclauss Jan 20, 2020
0d28217
iptools==0.7.0; python_version >= '3.0'
cclauss Jan 20, 2020
807e960
from openlibrary.catalog.works.find_works import ...
cclauss Jan 20, 2020
8f3475f
from scripts import _init_path
cclauss Jan 20, 2020
625a9b6
from openlibrary.core import helpers
cclauss Jan 20, 2020
09fab8c
from six.moves.http_client import HTTPConnection
cclauss Jan 20, 2020
ea05f61
from openlibrary.solr.data_provider import get_data_provider
cclauss Jan 20, 2020
afc9808
from six import StringIO
cclauss Jan 20, 2020
2fb9b53
from infogami.utils.view import public, render_template
cclauss Jan 20, 2020
b0a65bf
isort imports, from six.moves.html_parser import HTMLParser
cclauss Jan 20, 2020
a07b457
from six import StringIO
cclauss Jan 20, 2020
c0ccae0
from six.moves.collections_abc import Mapping
cclauss Jan 20, 2020
ccdd0e9
Remove unused import httplib
cclauss Jan 20, 2020
3dd16c7
import six.moves.http_client as httplib
cclauss Jan 20, 2020
19e374d
from openlibrary.plugins.upstream import acs4
cclauss Jan 20, 2020
e859980
from openlibrary.plugins.upstream import utils
cclauss Jan 20, 2020
dc3b19f
from openlibrary.plugins.upstream import spamcheck, utils
cclauss Jan 20, 2020
f255414
import openlibrary.plugins.openlibrary.filters as stats_filters
cclauss Jan 20, 2020
c51ec0d
from openlibrary.core.lists.model import ListMixin, Seed
cclauss Jan 20, 2020
5691d76
from openlibrary.plugins.upstream import borrow, forms, utils
cclauss Jan 20, 2020
2319895
from openlibrary.plugins.worksearch import searchapi, subjects
cclauss Jan 20, 2020
48bd7ad
from openlibrary.plugins.search import facet_hash, solr_client, stopword
cclauss Jan 20, 2020
750adf4
from openlibrary.plugins.openlibrary.libraries import LoanStats
cclauss Jan 20, 2020
6af0ff8
from six import StringIO
cclauss Jan 20, 2020
aec8521
from six import StringIO
cclauss Jan 20, 2020
1a6e857
from six import StringIO
cclauss Jan 20, 2020
8411010
from six import StringIO
cclauss Jan 20, 2020
ab50837
from openlibrary.plugins.importapi import import_edition_builder
cclauss Jan 21, 2020
48b5ebf
Update import_rdf.py
cclauss Jan 21, 2020
27886df
from openlibrary.plugins.importapi.import_edition_builder import impo…
cclauss Jan 21, 2020
eb90a71
urllib.splittype(), urllib.splithost(), urllib.splitport() were remov…
cclauss Jan 21, 2020
14513c0
Update warc.py
cclauss Jan 21, 2020
4094f7f
Merge branch 'master' into from-web.browser-import-AppBrowser
cclauss Jan 21, 2020
aa5f814
Update test_find_works.py
cclauss Jan 21, 2020
777326f
Update marc_binary.py
cclauss Jan 21, 2020
2e8d1bc
Revert changes to mock_ol.py
cclauss Jan 28, 2020
5 changes: 3 additions & 2 deletions openlibrary/catalog/marc/marc_xml.py
@@ -1,8 +1,9 @@
from lxml import etree
from marc_base import MarcBase, MarcException
from unicodedata import normalize

import six
from lxml import etree

from openlibrary.catalog.marc.marc_base import MarcBase, MarcException

data_tag = '{http://www.loc.gov/MARC21/slim}datafield'
control_tag = '{http://www.loc.gov/MARC21/slim}controlfield'
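The marc_xml.py hunk above shows the import pattern repeated throughout this branch: Python 2's implicit relative imports (from marc_base import ...) are rewritten as absolute imports rooted at the openlibrary package, because Python 3 dropped implicit relative imports. Below is a minimal, self-contained sketch of that difference; it builds a throwaway demo_pkg package on disk rather than using the real Open Library modules, so the names are stand-ins only.

import os
import sys
import tempfile
import textwrap

# Build a tiny throwaway package: demo_pkg/{__init__,sibling,consumer}.py
root = tempfile.mkdtemp()
pkg = os.path.join(root, "demo_pkg")
os.mkdir(pkg)
open(os.path.join(pkg, "__init__.py"), "w").close()
with open(os.path.join(pkg, "sibling.py"), "w") as f:
    f.write("VALUE = 42\n")
with open(os.path.join(pkg, "consumer.py"), "w") as f:
    f.write(textwrap.dedent("""\
        try:
            from sibling import VALUE            # implicit relative import: Python 2 only
        except ImportError:
            from demo_pkg.sibling import VALUE   # absolute import: works on 2 and 3
    """))

sys.path.insert(0, root)
from demo_pkg.consumer import VALUE
print(VALUE)  # 42 under both Python 2 and Python 3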
2 changes: 1 addition & 1 deletion openlibrary/catalog/merge/names.py
@@ -1,6 +1,6 @@
from __future__ import print_function
import re
from normalize import normalize
from openlibrary.catalog.merge.normalize import normalize

re_split_parts = re.compile('(.*?[. ]+)')
re_marc_name = re.compile('^(.*), (.*)$')
2 changes: 1 addition & 1 deletion openlibrary/catalog/oca/parse.py
@@ -1,6 +1,6 @@
from __future__ import print_function
import sys
from cStringIO import StringIO
from six import StringIO
from xml.parsers.expat import error as xml_error
from elementtree import ElementTree
from types import *
2 changes: 1 addition & 1 deletion openlibrary/core/init.py
@@ -14,7 +14,7 @@
import sys
import time

import formats
from openlibrary.core import formats

class Init:
"""Init process for starting and managing OL services.
4 changes: 1 addition & 3 deletions openlibrary/core/inlibrary.py
@@ -1,10 +1,8 @@
"""Tools for in-library lending.
"""
import web
import cache
import iprange
import geo_ip
from infogami.utils import delegate
from openlibrary.core import cache, geo_ip, iprange

def _get_libraries(site=None):
"""Returns all the libraries each as a dict."""
4 changes: 2 additions & 2 deletions openlibrary/core/middleware.py
@@ -1,7 +1,7 @@
"""WSGI middleware used in Open Library.
"""
import web
import StringIO
from six import StringIO
import gzip

class GZipMiddleware:
@@ -23,7 +23,7 @@ def get_response_header(name, default=None):
return default

def compress(text, level=9):
f = StringIO.StringIO()
f = StringIO()
gz = gzip.GzipFile(None, 'wb', level, fileobj=f)
gz.write(text)
gz.close()
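The middleware change above is the StringIO pattern this branch applies in many files: the Python 2 StringIO module is replaced by six.StringIO, which resolves to StringIO.StringIO on Python 2 and io.StringIO on Python 3. A minimal sketch of that shim follows; it covers text buffering only, and note that binary payloads (such as the gzip buffer in GZipMiddleware.compress) would still need io.BytesIO on Python 3, since GzipFile writes bytes to its file object.

from six import StringIO

buf = StringIO()            # io.StringIO on Python 3, StringIO.StringIO on Python 2
buf.write(u"Hello, ")
buf.write(u"Open Library")
print(buf.getvalue())       # Hello, Open Library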
16 changes: 7 additions & 9 deletions openlibrary/core/models.py
@@ -7,21 +7,19 @@
import iptools
from infogami.infobase import client

import helpers as h
from openlibrary.core import helpers as h

#TODO: fix this. openlibrary.core should not import plugins.
from openlibrary import accounts
from openlibrary.utils import extract_numeric_id_from_olid
from openlibrary.plugins.upstream.utils import get_history
from openlibrary.core.helpers import private_collection_in
from openlibrary.core import db, cache, iprange, inlibrary, loanstats, waitinglist, lending
from openlibrary.core.bookshelves import Bookshelves
from openlibrary.core.helpers import private_collection_in
from openlibrary.core.lists.model import ListMixin, Seed
from openlibrary.core.ratings import Ratings
from openlibrary.utils.isbn import to_isbn_13, isbn_13_to_isbn_10
from openlibrary.core.vendors import create_edition_from_amazon_metadata

# relative imports
from lists.model import ListMixin, Seed
from . import db, cache, iprange, inlibrary, loanstats, waitinglist, lending
from openlibrary.plugins.upstream.utils import get_history
from openlibrary.utils import extract_numeric_id_from_olid
from openlibrary.utils.isbn import to_isbn_13, isbn_13_to_isbn_10

from six.moves import urllib

2 changes: 1 addition & 1 deletion openlibrary/core/olmarkdown.py
@@ -11,7 +11,7 @@
the javascript markdown editor used in OL.
"""

import helpers as h
from openlibrary.core import helpers as h
import re
from infogami.utils.markdown import markdown

2 changes: 1 addition & 1 deletion openlibrary/core/processors/__init__.py
@@ -1 +1 @@
from readableurls import ReadableUrlProcessor
from openlibrary.core.processors.readableurls import ReadableUrlProcessor
2 changes: 1 addition & 1 deletion openlibrary/coverstore/tests/test_code.py
@@ -1,5 +1,5 @@
from .. import code
from cStringIO import StringIO
from six import StringIO
import web
import datetime

31 changes: 18 additions & 13 deletions openlibrary/coverstore/warc.py
@@ -4,18 +4,19 @@
http://archive-access.sourceforge.net/warc/warc_file_format-0.10.html
"""

import urllib
import httplib
import datetime

from six.moves.http_client import HTTPConnection
from six.moves.urllib.parse import urlparse

WARC_VERSION = "0.10"
CRLF = "\r\n"

class WARCReader:
"""Reader to read records from a warc file.

>>> import StringIO
>>> f = StringIO.StringIO()
>>> from six import StringIO
>>> f = StringIO()
>>> r1 = WARCRecord("resource", "subject_uri", "image/jpeg", {"hello": "world"}, "foo")
>>> r2 = WARCRecord("resource", "subject_uri", "image/jpeg", {"hello": "world"}, "bar")
>>> w = WARCWriter(f)
@@ -111,7 +112,7 @@ def _readuntil(self, condition):

def read(self, size):
protocol, host, port, path = self.urlsplit(self.url)
conn = httplib.HTTPConnection(host, port)
conn = HTTPConnection(host, port)
headers = {'Range': 'bytes=%d-%d' % (self.offset, self.offset + size - 1)}
conn.request('GET', path, None, headers)
response = conn.getresponse()
@@ -123,13 +124,16 @@ def urlsplit(self, url):
"""Splits url into protocol, host, port and path.

>>> f = HTTPFile('')
>>> f.urlsplit("http://www.google.com")
('http', 'www.google.com', None, '')
>>> f.urlsplit("http://www.google.com/search?q=hello")
('http', 'www.google.com', None, '/search?q=hello')
>>> f.urlsplit("http://www.google.com:80/search?q=hello")
('http', 'www.google.com', 80, '/search?q=hello')
"""
protocol, rest = urllib.splittype(url)
hostport, path = urllib.splithost(rest)
host, port = urllib.splitport(hostport)
return protocol, host, port, path
p = urlparse(url)
fullpath = "?".join((p.path, p.query)) if (p.path or p.query) else ""
return p.scheme, p.hostname, p.port, fullpath

class WARCHeader:
r"""WARCHeader class represents the header in the WARC file format.
@@ -219,9 +223,9 @@ def __repr__(self):
class LazyWARCRecord(WARCRecord):
"""Class to create WARCRecord lazily.

>>> import StringIO
>>> from six import StringIO
>>> r1 = WARCRecord("resource", "subject_uri", "image/jpeg", {"hello": "world"}, "foo bar", creation_date="20080808080808", record_id="record_42")
>>> f = StringIO.StringIO(str(r1))
>>> f = StringIO(str(r1))
>>> offset = len(str(r1.get_header()))
>>> r2 = LazyWARCRecord(f, offset, r1.get_header())
>>> r1 == r2
@@ -247,8 +251,9 @@ def get_data(self):
class WARCWriter:
r"""Writes to write warc records to file.

>>> import re, StringIO
>>> f = StringIO.StringIO()
>>> import re
>>> from six import StringIO
>>> f = StringIO()
>>> r1 = WARCRecord("resource", "subject_uri", "image/jpeg", {"hello": "world"}, "foo", creation_date="20080808080808", record_id="record_42")
>>> r2 = WARCRecord("resource", "subject_uri", "image/jpeg", {"hello": "world"}, "bar", creation_date="20080808090909", record_id="record_43")
>>> w = WARCWriter(f)
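The warc.py hunk above replaces the old urllib.splittype()/urllib.splithost()/urllib.splitport() helpers, which one of the commit messages notes were removed, with a urlparse-based implementation. A standalone sketch of that replacement is below; the expected tuples are copied from the doctests shown in the diff.

from six.moves.urllib.parse import urlparse

def urlsplit(url):
    """Split url into (protocol, host, port, path-with-query), as in HTTPFile.urlsplit."""
    p = urlparse(url)
    fullpath = "?".join((p.path, p.query)) if (p.path or p.query) else ""
    return p.scheme, p.hostname, p.port, fullpath

# Expected values taken from the doctests in the hunk above.
assert urlsplit("http://www.google.com") == ("http", "www.google.com", None, "")
assert urlsplit("http://www.google.com/search?q=hello") == \
    ("http", "www.google.com", None, "/search?q=hello")
assert urlsplit("http://www.google.com:80/search?q=hello") == \
    ("http", "www.google.com", 80, "/search?q=hello")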
2 changes: 1 addition & 1 deletion openlibrary/plugins/importapi/import_opds.py
@@ -2,7 +2,7 @@
OL Import API OPDS parser
"""

import import_edition_builder
from openlibrary.plugins.importapi import import_edition_builder

import six

2 changes: 1 addition & 1 deletion openlibrary/plugins/importapi/import_rdf.py
@@ -2,7 +2,7 @@
OL Import API RDF parser
"""

import import_edition_builder
from openlibrary.plugins.importapi import import_edition_builder

import six

2 changes: 1 addition & 1 deletion openlibrary/plugins/importapi/metaxml_to_json.py
@@ -10,7 +10,7 @@
"""
from __future__ import print_function

from import_edition_builder import import_edition_builder
from openlibrary.plugins.importapi.import_edition_builder import import_edition_builder

def parse_collection(collection):
collection_dict = {
6 changes: 2 additions & 4 deletions openlibrary/plugins/inside/code.py
@@ -1,21 +1,19 @@

from time import time
import re
import simplejson
import httplib
import logging

import web
from infogami.utils import delegate
from infogami.utils.view import render_template
from openlibrary.core.fulltext import fulltext_search


from six.moves import urllib

import six.moves.http_client as httplib

RESULTS_PER_PAGE = 20


class search_inside(delegate.page):

path = '/search/inside'
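In plugins/inside/code.py the bare import httplib becomes import six.moves.http_client as httplib, so the existing httplib.HTTPConnection call sites stay untouched: the alias resolves to httplib on Python 2 and http.client on Python 3. A small sketch of the aliased API follows; example.com is only a placeholder host.

import six.moves.http_client as httplib   # httplib on Python 2, http.client on Python 3

conn = httplib.HTTPConnection("example.com", 80)
conn.request("GET", "/")                   # same call signature on both versions
response = conn.getresponse()
print(response.status, response.reason)    # e.g. 200 OK
conn.close()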
2 changes: 1 addition & 1 deletion openlibrary/plugins/openlibrary/borrow_home.py
@@ -11,9 +11,9 @@

from openlibrary.core import helpers as h
from openlibrary.core import inlibrary
from openlibrary.plugins.openlibrary.libraries import LoanStats
from openlibrary.plugins.worksearch.subjects import SubjectEngine

from libraries import LoanStats

class borrow(delegate.page):
path = "/borrow"
2 changes: 1 addition & 1 deletion openlibrary/plugins/openlibrary/libraries.py
@@ -5,7 +5,7 @@
import logging
import datetime
import itertools
from cStringIO import StringIO
from six import StringIO
import csv
import simplejson

7 changes: 3 additions & 4 deletions openlibrary/plugins/openlibrary/stats.py
@@ -4,14 +4,13 @@
import logging
import traceback

from openlibrary.core import stats as graphite_stats

import web
from infogami import config
from infogami.utils import stats
import openlibrary.core.stats

import filters as stats_filters
from openlibrary.core import stats as graphite_stats
import openlibrary.core.stats
import openlibrary.plugins.openlibrary.filters as stats_filters

l = logging.getLogger("openlibrary.stats")

29 changes: 12 additions & 17 deletions openlibrary/plugins/search/code.py
@@ -1,26 +1,23 @@
from __future__ import print_function
import web
import stopword
import pdb

from infogami import utils
from infogami.utils import delegate
from infogami.infobase.client import Thing
from infogami.utils import view, template
from infogami import config
from infogami.plugins.api.code import jsonapi

import pdb
import re
import web
import solr_client
import time
import simplejson
from collections import defaultdict
from functools import partial
from gzip import open as gzopen
import cPickle
from collections import defaultdict

import simplejson
import six
import web
from infogami import config, utils
from infogami.infobase.client import Thing
from infogami.plugins.api.code import jsonapi
from infogami.utils import delegate, template, view
from six.moves import cPickle

from openlibrary.plugins.search import facet_hash, solr_client, stopword
from openlibrary.plugins.search.collapse import collapse_groups

render = template.render

@@ -70,7 +67,6 @@ def lookup_ocaid(ocaid):
w = web.ctx.site.get(ocat[0]) if ocat else None
return w

from collapse import collapse_groups
class fullsearch(delegate.page):
def POST(self):
errortext = None
@@ -135,7 +131,6 @@ class Result_nums: pass

GET = POST

import facet_hash
facet_token = view.public(facet_hash.facet_token)

class Timestamp(object):
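The search plugin's import cleanup above also swaps import cPickle for from six.moves import cPickle, which is the C-accelerated cPickle module on Python 2 and the standard pickle module on Python 3. A minimal sketch, with an illustrative payload rather than real Open Library data:

from six.moves import cPickle

payload = {"title": "Open Library", "editions": 42}
blob = cPickle.dumps(payload, protocol=2)   # protocol 2 is readable by both versions
assert cPickle.loads(blob) == payload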
5 changes: 3 additions & 2 deletions openlibrary/plugins/search/solr_client.py
@@ -1,19 +1,20 @@
#!/usr/bin/python
from xml.etree.cElementTree import ElementTree
from cStringIO import StringIO
import os
import re
from collections import defaultdict
import cgi
import web
import simplejson
from facet_hash import facet_token
import pdb

import six
from six import StringIO
from six.moves.urllib.parse import quote_plus
from six.moves.urllib.request import urlopen

from openlibrary.plugins.search.facet_hash import facet_token

php_location = "/petabox/setup.inc"

# server_addr = ('pharosdb.us.archive.org', 8983)
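solr_client.py, shown above, picks up quote_plus and urlopen from six.moves.urllib, papering over the split between Python 2's urllib/urllib2 and Python 3's urllib.parse/urllib.request. A short sketch follows; the query string and local Solr URL are illustrative assumptions, not taken from the module.

from six.moves.urllib.parse import quote_plus
from six.moves.urllib.request import urlopen

query = quote_plus('title:"open library" AND language:eng')
print(query)   # title%3A%22open+library%22+AND+language%3Aeng

# A request against a local Solr instance would then look like:
# response = urlopen("http://localhost:8983/solr/select?q=" + query).read()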
16 changes: 6 additions & 10 deletions openlibrary/plugins/upstream/account.py
@@ -18,19 +18,15 @@
import infogami.core.code as core

from openlibrary import accounts
from openlibrary.i18n import gettext as _
from openlibrary.core import helpers as h, lending
from openlibrary.core.bookshelves import Bookshelves
from openlibrary.plugins.recaptcha import recaptcha
from openlibrary.plugins import openlibrary as olib
from openlibrary.accounts import (
audit_accounts, Account, OpenLibraryAccount, InternetArchiveAccount, valid_email)
from openlibrary.core import helpers as h, lending
from openlibrary.core.bookshelves import Bookshelves
from openlibrary.core.sponsorships import get_sponsored_editions

import forms
import utils
import borrow

from openlibrary.i18n import gettext as _
from openlibrary.plugins import openlibrary as olib
from openlibrary.plugins.recaptcha import recaptcha
from openlibrary.plugins.upstream import borrow, forms, utils

from six.moves import range
from six.moves import urllib