
Commit

ver. 1.1.1
- bugfix
anezih committed Dec 17, 2023
1 parent 820ef1b commit bace48e
Showing 3 changed files with 17 additions and 9 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -0,0 +1,2 @@
**/*.zip
.vscode
4 changes: 3 additions & 1 deletion README.md
@@ -12,10 +12,12 @@ This plugin tries to parse metadata and cover information from kitapyurdu.com, a

<details>
<summary><b>Changelog:</b></summary>
<b>Ver. 1.1.1</b>
<li>Bugfix.</li>
<b>Ver. 1.1.0</b>
<li>Set minimum Calibre version.</li>
<li>Added an option to append extra metadata to the end of the description.</li>
<li>Try to fetch metadata with <b>kitapyurdu</b> identifier first.</li>
<b>Ver. 1.0.0</b>
<li>Initial release</li>
</details>
</details>
20 changes: 12 additions & 8 deletions __init__.py
@@ -84,12 +84,16 @@ def to_calibre_metadata(self, append_extra: bool = False):
mi.source_relevance = self.source_relevance
return mi

class KitapyurduMetadataParser():
def __init__(self, query, limit, logger, identifers: dict = {}) -> None:
self.query = query
self.max_results = limit
self.logger = logger
self.br = mechanize.Browser()
self.br.set_handle_robots(False)
self.br.addheaders = [
('User-Agent', 'APIs-Google (+https://developers.google.com/webmasters/APIs-Google.html)'),
]
self.ky_ident = None
ky_ident = identifers.get("kitapyurdu")
if ky_ident:
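
For readers skimming the diff: the added constructor lines configure a mechanize browser that ignores robots.txt and sends the APIs-Google User-Agent string when fetching pages. A minimal standalone sketch of that setup, assuming the same mechanize calls (the URL opened below is only an illustration, not necessarily what the parser requests):

    import mechanize

    # Same configuration as the new constructor lines: ignore robots.txt
    # and present the APIs-Google User-Agent on every request.
    br = mechanize.Browser()
    br.set_handle_robots(False)
    br.addheaders = [
        ("User-Agent",
         "APIs-Google (+https://developers.google.com/webmasters/APIs-Google.html)"),
    ]

    # Hypothetical usage; the real query URL is assembled elsewhere in the parser.
    response = br.open("https://www.kitapyurdu.com/")
    html = response.read()
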
@@ -183,7 +187,7 @@ def parse_pages(self, only_ident: bool = False):
jbox = cover_url_with_res.select_one("a.js-jbox-book-cover")["href"]
cover_url = jbox[:jbox.index("wh") - 1]
metadata.cover_url = [cover_url]

if metadata.cover_url:
metadata.cover_id = metadata.cover_url[0].split(":")[-1]

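The cover-URL slicing in the hunk above is easier to follow with a made-up href (real kitapyurdu image URLs may differ; this is only to show the string handling):

    # Invented example href, purely illustrative.
    jbox = "https://img.kitapyurdu.com/v1/getImage/fn:123456/wh:100-100"
    cover_url = jbox[:jbox.index("wh") - 1]  # "https://img.kitapyurdu.com/v1/getImage/fn:123456"
    cover_id = cover_url.split(":")[-1]      # "123456"
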
@@ -222,7 +226,7 @@ def parse_pages(self, only_ident: bool = False):
isbn = attrs_table.get("ISBN:")
if isbn:
metadata.isbn = isbn

lang = attrs_table.get("Dil:")
if lang:
# lang = lang.replace("I","ı").replace("İ", "i")
@@ -249,7 +253,7 @@ class Kitapyurdu(Source):
name = "Kitapyurdu"
author = "Nezih <https://github.com/anezih>"
description = _("Downloads metadata and covers from kitapyurdu.com")
version = (1, 1, 0)
version = (1, 1, 1)
minimum_calibre_version = (6, 10, 0)
supported_platforms = ["windows", "osx", "linux"]
capabilities = frozenset(["identify", "cover"])
@@ -290,7 +294,7 @@ def __init__(self, *args, **kwargs):

def get_book_url_name(self, idtype, idval, url):
return "Kitapyurdu"

def get_book_url(self, identifiers):
kitapyurdu_id = identifiers.get("kitapyurdu")
if kitapyurdu_id:
@@ -316,7 +320,7 @@ def build_query(self, log, title=None, authors=None, only_title=False, rm_accent
return all
else:
return None

def create_metadata_list(self, log, title=None, authors=None, identifiers={}):
metadata_list: list[KitapyurduMetadata] = []
ky_ident = identifiers.get("kitapyurdu")
@@ -340,7 +344,7 @@ def create_metadata_list(self, log, title=None, authors=None, identifiers={}):
return metadata_list
else:
return None

def identify(self, log, result_queue, abort, title=None, authors=None, identifiers={}, timeout=30):
if abort.is_set():
return
@@ -351,7 +355,7 @@ def identify(self, log, result_queue, abort, title=None, authors=None, identifie
for relevance, mi in enumerate(metadata_list, start=1):
mi.source_relevance = relevance
result_queue.put(mi.to_calibre_metadata(self.append_extra))

def get_cached_cover_url(self, identifiers):
_id = identifiers.get('kitapyurdu_kapak')
if _id:
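
The identify() hunk keeps the existing relevance scheme: results are ranked simply by the order in which the parser returned them, with 1 marking the first hit. A rough sketch of that pattern with placeholder values (not plugin code):

    from queue import Queue

    result_queue = Queue()
    metadata_list = ["best match", "second match"]  # stand-ins for KitapyurduMetadata objects

    # Mirrors enumerate(metadata_list, start=1) in identify(): each result
    # gets an increasing relevance value and is pushed onto the result queue.
    for relevance, mi in enumerate(metadata_list, start=1):
        result_queue.put((relevance, mi))
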
