diff --git a/faapi/base.py b/faapi/base.py index 109ac92..48298ba 100644 --- a/faapi/base.py +++ b/faapi/base.py @@ -11,11 +11,11 @@ from requests import Session from .connection import CookieDict -from .connection import Response from .connection import get from .connection import get_robots from .connection import join_url from .connection import make_session +from .connection import Response from .connection import stream_binary from .exceptions import DisallowedPath from .exceptions import Unauthorized @@ -132,8 +132,10 @@ def get(self, path: str, **params: Union[str, bytes, int, float]) -> Response: self.handle_delay() return get(self.session, path, timeout=self.timeout, params=params) - def get_parsed(self, path: str, *, skip_page_check: bool = False, skip_auth_check: bool = False, - **params: Union[str, bytes, int, float]) -> BeautifulSoup: + def get_parsed( + self, path: str, *, skip_page_check: bool = False, skip_auth_check: bool = False, + **params: Union[str, bytes, int, float] + ) -> BeautifulSoup: """ Fetch a path with a GET request and parse it using BeautifulSoup. @@ -170,8 +172,9 @@ def frontpage(self) -> list[SubmissionPartial]: submissions: list[SubmissionPartial] = [SubmissionPartial(f) for f in parse_submission_figures(page_parsed)] return sorted({s for s in submissions}, reverse=True) - def submission(self, submission_id: int, get_file: bool = False, *, chunk_size: Optional[int] = None - ) -> tuple[Submission, Optional[bytes]]: + def submission( + self, submission_id: int, get_file: bool = False, *, chunk_size: Optional[int] = None + ) -> tuple[Submission, Optional[bytes]]: """ Fetch a submission and, optionally, its file. 
@@ -298,7 +301,8 @@ def watchlist_to(self, user: str, page: int = 1) -> tuple[list[UserPartial], Opt """ users: list[UserPartial] = [] us, np = parse_watchlist( - self.get_parsed(join_url("watchlist", "to", quote(username_url(user)), page), skip_auth_check=True)) + self.get_parsed(join_url("watchlist", "to", quote(username_url(user))), page=page, skip_auth_check=True) + ) for s, u in us: _user: UserPartial = UserPartial() _user.name = u @@ -315,7 +319,8 @@ def watchlist_by(self, user: str, page: int = 1) -> tuple[list[UserPartial], Opt """ users: list[UserPartial] = [] us, np = parse_watchlist( - self.get_parsed(join_url("watchlist", "by", quote(username_url(user)), page), skip_auth_check=True)) + self.get_parsed(join_url("watchlist", "by", quote(username_url(user))), page=page, skip_auth_check=True) + ) for s, u in us: _user: UserPartial = UserPartial() _user.name = u diff --git a/faapi/comment.py b/faapi/comment.py index 0c50be7..ab02dec 100644 --- a/faapi/comment.py +++ b/faapi/comment.py @@ -16,8 +16,10 @@ class Comment: Contains comment information and references to replies and parent objects. 
""" - def __init__(self, tag: Optional[Tag] = None, - parent: Optional[Union[faapi.submission.Submission, faapi.journal.Journal]] = None): + def __init__( + self, tag: Optional[Tag] = None, + parent: Optional[Union[faapi.submission.Submission, faapi.journal.Journal]] = None + ): """ :param tag: The comment tag from which to parse information :param parent: The parent object of the comment diff --git a/faapi/connection.py b/faapi/connection.py index bc91f24..7635a4e 100644 --- a/faapi/connection.py +++ b/faapi/connection.py @@ -14,8 +14,8 @@ from requests import Session from .__version__ import __version__ -from .exceptions import Unauthorized from .exceptions import _raise_exception +from .exceptions import Unauthorized root: str = "https://www.furaffinity.net" @@ -49,13 +49,17 @@ def get_robots(session: Session) -> RobotFileParser: return robots -def get(session: Session, path: str, *, timeout: Optional[int] = None, - params: Optional[dict[str, Union[str, bytes, int, float]]] = None) -> Response: +def get( + session: Session, path: str, *, timeout: Optional[int] = None, + params: Optional[dict[str, Union[str, bytes, int, float]]] = None +) -> Response: return session.get(join_url(root, path), params=params, timeout=timeout) -def stream_binary(session: Session, url: str, *, chunk_size: Optional[int] = None, - timeout: Optional[int] = None) -> bytes: +def stream_binary( + session: Session, url: str, *, chunk_size: Optional[int] = None, + timeout: Optional[int] = None +) -> bytes: stream: Response = session.get(url, stream=True, timeout=timeout) stream.raise_for_status() diff --git a/faapi/parse.py b/faapi/parse.py index d4cace5..3cbaf15 100644 --- a/faapi/parse.py +++ b/faapi/parse.py @@ -31,7 +31,6 @@ relative_url: Pattern = re_compile(r"^(?:https?://(?:www\.)?furaffinity\.net)?(.*)") mentions_regexp: Pattern = re_compile(r"^(?:(?:https?://)?(?:www\.)?furaffinity\.net)?/user/([^/#]+).*$") url_username_regexp: Pattern = 
re_compile(r"/(?:user|gallery|scraps|favorites|journals|commissions)/([^/]+)(/.*)?") -watchlist_next_regexp: Pattern = re_compile(r"/watchlist/(?:by|to)/[^/]+/(\d+)") not_found_messages: tuple[str, ...] = ("not in our database", "cannot be found", "could not be found", "user not found") deactivated_messages: tuple[str, ...] = ("deactivated", "pending deletion") smilie_icons: tuple[str, ...] = ( @@ -156,23 +155,23 @@ def html_to_bbcode(html: str) -> str: quote_tag.replace_with("[quote]", *quote_tag.children, "[/quote]") for [selector, bbcode_tag] in ( - ("i", "i"), - ("b", "b"), - ("strong", "b"), - ("u", "u"), - ("s", "s"), - ("code.bbcode_left", "left"), - ("code.bbcode_center", "center"), - ("code.bbcode_right", "right"), - ("span.bbcode_spoiler", "spoiler"), - ("sub", "sub"), - ("sup", "sup"), - ("h1", "h1"), - ("h2", "h2"), - ("h3", "h3"), - ("h4", "h4"), - ("h5", "h5"), - ("h6", "h6"), + ("i", "i"), + ("b", "b"), + ("strong", "b"), + ("u", "u"), + ("s", "s"), + ("code.bbcode_left", "left"), + ("code.bbcode_center", "center"), + ("code.bbcode_right", "right"), + ("span.bbcode_spoiler", "spoiler"), + ("sub", "sub"), + ("sup", "sup"), + ("h1", "h1"), + ("h2", "h2"), + ("h3", "h3"), + ("h4", "h4"), + ("h5", "h5"), + ("h6", "h6"), ): for tag in body.select(selector): tag.replace_with(f"[{bbcode_tag}]", *tag.children, f"[/{bbcode_tag}]") @@ -199,15 +198,15 @@ def html_to_bbcode(html: str) -> str: bbcode = sub("^ *", "", bbcode, flags=MULTILINE) for char, substitution in ( - ("©", "(c)"), - ("™", "(tm)"), - ("®", "(r)"), - ("&copy;", "(c)"), - ("&reg;", "(r)"), - ("&trade;", "(tm)"), - ("&lt;", "<"), - ("&gt;", ">"), - ("&amp;", "&"), + ("©", "(c)"), + ("™", "(tm)"), + ("®", "(r)"), + ("&copy;", "(c)"), + ("&reg;", "(r)"), + ("&trade;", "(tm)"), + ("&lt;", "<"), + ("&gt;", ">"), + ("&amp;", "&"), ): bbcode = bbcode.replace(char, substitution) @@ -877,9 +876,11 @@ def parse_user_journals(journals_page: BeautifulSoup) -> dict[str, Any]: } -def parse_watchlist(watch_page: BeautifulSoup) -> tuple[list[tuple[str, str]],
int]: - tag_next: Optional[Tag] = watch_page.select_one("section div.floatright form[method=get]") - match_next: Optional[Match] = watchlist_next_regexp.match(get_attr(tag_next, "action")) if tag_next else None +def parse_watchlist(watch_page: BeautifulSoup) -> tuple[list[tuple[str, str]], Optional[int]]: + tag_next: Optional[Tag] = watch_page.select_one( + 'section div.floatright form[method="get"] input[name="next"][value]' + ) + next_page: Optional[int] = int(get_attr(tag_next, "value")) if tag_next else None watches: list[tuple[str, str]] = [] @@ -894,4 +895,4 @@ def parse_watchlist(watch_page: BeautifulSoup) -> tuple[list[tuple[str, str]], i watches.append((status, username)) - return watches, int(match_next[1]) if match_next else 0 + return watches, next_page diff --git a/faapi/submission.py b/faapi/submission.py index 866d8b2..9755573 100644 --- a/faapi/submission.py +++ b/faapi/submission.py @@ -6,12 +6,12 @@ from .connection import root from .exceptions import _raise_exception from .parse import BeautifulSoup -from .parse import Tag from .parse import check_page_raise from .parse import html_to_bbcode from .parse import parse_comments from .parse import parse_submission_figure from .parse import parse_submission_page +from .parse import Tag from .user import UserPartial diff --git a/faapi/user.py b/faapi/user.py index f179408..6264669 100644 --- a/faapi/user.py +++ b/faapi/user.py @@ -7,16 +7,20 @@ from .connection import root from .exceptions import _raise_exception from .parse import BeautifulSoup -from .parse import Tag from .parse import check_page_raise from .parse import html_to_bbcode from .parse import parse_user_page from .parse import parse_user_tag +from .parse import Tag from .parse import username_url -class UserStats(namedtuple("UserStats", ["views", "submissions", "favorites", "comments_earned", - "comments_made", "journals", "watched_by", "watching"])): +class UserStats( + namedtuple( + "UserStats", ["views", "submissions", "favorites", 
"comments_earned", + "comments_made", "journals", "watched_by", "watching"] + ) +): """ This object contains a user's statistics: * views