From 2ecbda4b1bc9064058c6e2189a2f3d6b71a2ed1c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Justen=20=28=40turicas=29?= Date: Sun, 19 May 2024 21:26:02 -0300 Subject: [PATCH 001/115] Add console_scripts to setup.cfg --- setup.cfg | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/setup.cfg b/setup.cfg index 77478cb..2cffba5 100644 --- a/setup.cfg +++ b/setup.cfg @@ -24,6 +24,10 @@ packages = find: python_requires = >=3.7 install_requires = file: requirements/base.txt +[options.entry_points] +console_scripts = + youtool = youtool:cli + [options.extras_require] cli = file: requirements/cli.txt dev = file: requirements/dev.txt From 252ff46e14cc221b07bda07843aa94934e9d6162 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Justen=20=28=40turicas=29?= Date: Sun, 19 May 2024 21:28:04 -0300 Subject: [PATCH 002/115] Implement draft CLI module --- youtool/cli.py | 44 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 youtool/cli.py diff --git a/youtool/cli.py b/youtool/cli.py new file mode 100644 index 0000000..be0bbd0 --- /dev/null +++ b/youtool/cli.py @@ -0,0 +1,44 @@ +import argparse + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument("--api-key") + subparsers = parser.add_subparsers(required=True, dest="command") + + api_key = args.api_key or os.environ.get("YOUTUBE_API_KEY") + + cmd_channel_id = subparsers.add_parser("channel-id", help="Get channel IDs from a list of URLs (or CSV filename with URLs inside), generate CSV output (just the IDs)") + cmd_channel_info = subparsers.add_parser("channel-info", help="Get channel info from a list of IDs (or CSV filename with IDs inside), generate CSV output (same schema for `channel` dicts)") + cmd_video_info = subparsers.add_parser("video-info", help="Get video info from a list of IDs or URLs (or CSV filename with URLs/IDs inside), generate CSV output (same schema for `video` dicts)") + cmd_video_search = subparsers.add_parser("video-search", help="Get video info from a list of IDs or URLs (or CSV filename with URLs/IDs inside), generate CSV output (simplified `video` dict schema or option to get full video info after)") + cmd_video_comments = subparsers.add_parser("video-comments", help="Get comments from a video ID, generate CSV output (same schema for `comment` dicts)") + cmd_video_livechat = subparsers.add_parser("video-livechat", help="Get comments from a video ID, generate CSV output (same schema for `chat_message` dicts)") + cmd_video_transcriptions = subparsers.add_parser("video-transcription", help="Download video transcriptions based on language code, path and list of video IDs or URLs (or CSV filename with URLs/IDs inside), download files to destination and report results") + + args = parser.parse_args() + + if args.command == "channel-id": + print(f"Implement: {args.command}") # TODO: implement + + elif args.command == "channel-info": + print(f"Implement: {args.command}") # TODO: implement + + elif args.command == "video-info": + print(f"Implement: {args.command}") # TODO: implement + + elif args.command == "video-search": + print(f"Implement: {args.command}") # TODO: implement + + elif args.command == "video-comments": + print(f"Implement: {args.command}") # TODO: implement + + elif args.command == "video-livechat": + print(f"Implement: {args.command}") # TODO: implement + + elif args.command == "video-transcription": + print(f"Implement: {args.command}") # TODO: implement + + +if __name__ == "__main__": + main() From 
dcc9e2f16ed3db17258108292b63f71d93773f97 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Justen=20=28=40turicas=29?= Date: Sun, 19 May 2024 21:30:52 -0300 Subject: [PATCH 003/115] Add old/draft CLI search code --- youtool/cli.py | 114 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 114 insertions(+) diff --git a/youtool/cli.py b/youtool/cli.py index be0bbd0..ff675a3 100644 --- a/youtool/cli.py +++ b/youtool/cli.py @@ -1,4 +1,6 @@ import argparse +import os +import sys def main(): @@ -29,6 +31,118 @@ def main(): elif args.command == "video-search": print(f"Implement: {args.command}") # TODO: implement + exit(1) + + # TODO: update code below based on new YouTube class API + import rows + from loguru import logger + from tqdm import tqdm + + from youtool import YouTube + + parser = argparse.ArgumentParser() + parser.add_argument("--key") + parser.add_argument("csv_filename") + parser.add_argument("url", nargs="+") + args = parser.parse_args() + + key = args.key or os.environ.get("YOUTUBE_API_KEY") + if not key: + print("ERROR: Must provide an API key (--key or YOUTUBE_API_KEY env var)", file=sys.stderr) + exit(1) + + if not Path(args.csv_filename).parent.exists(): + Path(args.csv_filename).parent.mkdir(parents=True) + writer = rows.utils.CsvLazyDictWriter(args.csv_filename) # TODO: use csv + yt = YouTube(key) + videos_urls = [] + channels = {} + for url in tqdm(args.url, desc="Retrieving channel IDs"): + url = url.strip() + if "/watch?" in url: + videos_urls.append(url) + continue + channel_id = yt.channel_id_from_url(url) + if not channel_id: + username = url.split("youtube.com/")[1].split("?")[0].split("/")[0] + logger.warning(f"Channel ID not found for URL {url}") + continue + channels[channel_id] = { + "id": channel_id, + "url": url, + } + for channel_id, playlist_id in yt.playlists_ids(list(channels.keys())).items(): + channels[channel_id]["playlist_id"] = playlist_id + fields = "id duration definition status views likes dislikes favorites comments channel_id title description published_at scheduled_to finished_at concurrent_viewers started_at".split() + # TODO: check fields + for data in tqdm(channels.values(), desc="Retrieving videos"): + try: + for video_batch in ipartition(yt.playlist_videos(data["playlist_id"]), 50): + for video in yt.videos_infos([row["id"] for row in video_batch]): + writer.writerow({field: video.get(field) for field in fields}) + except RuntimeError: # Cannot find playlist + continue + videos_ids = (video_url.split("watch?v=")[1].split("&")[0] for video_url in videos_urls) + for video in tqdm(yt.videos_infos(videos_ids), desc="Retrieving individual videos"): + writer.writerow({field: video.get(field) for field in fields}) + writer.close() + + # SEARCH + now = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc) + timezone_br = datetime.timezone(offset=datetime.timedelta(hours=-3)) + now_br = now.astimezone(timezone_br) + search_start = (now - datetime.timedelta(hours=1)).replace(minute=0, second=0, microsecond=0) + search_stop = search_start + datetime.timedelta(hours=1) + + parent = Path(__file__).parent + parser = argparse.ArgumentParser() + parser.add_argument("--keys-filename", default=parent / "youtube-keys.csv") + parser.add_argument("--terms-filename", default=parent / "search-terms.csv") + parser.add_argument("--channels-filename", default=parent / "search-channels.csv") + parser.add_argument("--start", default=str(search_start)) + parser.add_argument("--stop", default=str(search_stop)) + parser.add_argument("--limit", 
type=int, default=20) + parser.add_argument("--order", default="viewCount") + parser.add_argument("data_path") + args = parser.parse_args() + + data_path = Path(args.data_path) + keys_filename = Path(args.keys_filename) + terms_filename = Path(args.terms_filename) + channels_filename = Path(args.channels_filename) + now_path_name = now_br.strftime("%Y-%m-%dT%H") + youtube_keys = read_keys(keys_filename) + channels_groups = read_channels(args.channels_filename) + search_start, search_stop = args.start, args.stop + if isinstance(search_start, str): + search_start = datetime.datetime.fromisoformat(search_start) + if isinstance(search_stop, str): + search_stop = datetime.datetime.fromisoformat(search_stop) + search_start_str = search_start.astimezone(datetime.timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ") + search_stop_str = search_stop.astimezone(datetime.timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ") + search_limit = args.limit + search_order = args.order + terms_categories = read_csv_dictlist(terms_filename, "categoria", "termo") + + print(search_start_str) + print(search_stop_str) + + search_start_br = search_start.astimezone(timezone_br) + result_filename = data_path / f"search_{search_start_br.strftime('%Y-%m-%dT%H')}.csv" + writer = rows.utils.CsvLazyDictWriter(result_filename) + search_results = youtube_search( + terms_categories=terms_categories, + keys=youtube_keys["search"], + start=search_start_str, + stop=search_stop_str, + channels_groups=channels_groups, + order=search_order, + limit=search_limit, + ) + for result in search_results: + writer.writerow(result) + writer.close() + elif args.command == "video-comments": print(f"Implement: {args.command}") # TODO: implement From f2540a8784ebc7bd884d22f08840d806523e9022 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Justen=20=28=40turicas=29?= Date: Sat, 8 Jun 2024 14:13:49 -0300 Subject: [PATCH 004/115] Add useful scripts (to be added to utils and CLI) --- scripts/channel_data.py | 187 ++++++++++++++++++++++++++++++++++++++++ scripts/clean_vtt.py | 43 +++++++++ 2 files changed, 230 insertions(+) create mode 100644 scripts/channel_data.py create mode 100644 scripts/clean_vtt.py diff --git a/scripts/channel_data.py b/scripts/channel_data.py new file mode 100644 index 0000000..e00b965 --- /dev/null +++ b/scripts/channel_data.py @@ -0,0 +1,187 @@ +# pip install youtool[livechat,transcription] +import argparse +import os +import json +import shelve +from pathlib import Path + +from chat_downloader.errors import ChatDisabled, LoginRequired, NoChatReplay +from tqdm import tqdm +from youtool import YouTube + + +class CsvLazyDictWriter: # Got and adapted from + """Lazy CSV dict writer, so you don't need to specify field names beforehand + + This class is almost the same as `csv.DictWriter` with the following + differences: + + - You don't need to pass `fieldnames` (it's extracted on the first + `.writerow` call); + - You can pass either a filename or a fobj (like `sys.stdout`); + """ + + def __init__(self, filename_or_fobj, encoding="utf-8", *args, **kwargs): + self.writer = None + self.filename_or_fobj = filename_or_fobj + self.encoding = encoding + self._fobj = None + self.writer_args = args + self.writer_kwargs = kwargs + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + + @property + def fobj(self): + if self._fobj is None: + if getattr(self.filename_or_fobj, "read", None) is not None: + self._fobj = self.filename_or_fobj + else: + self._fobj = open( + self.filename_or_fobj, 
mode="w", encoding=self.encoding + ) + + return self._fobj + + def writerow(self, row): + if self.writer is None: + self.writer = csv.DictWriter( + self.fobj, + fieldnames=list(row.keys()), + *self.writer_args, + **self.writer_kwargs + ) + self.writer.writeheader() + + self.writerow = self.writer.writerow + return self.writerow(row) + + def __del__(self): + self.close() + + def close(self): + if self._fobj and not self._fobj.closed: + self._fobj.close() + + +# TODO: add options to get only part of the data (not all steps) +parser = argparse.ArgumentParser() +parser.add_argument("--api-key", default=os.environ.get("YOUTUBE_API_KEY"), help="Comma-separated list of YouTube API keys to use") +parser.add_argument("username_or_channel_url", type=str) +parser.add_argument("data_path", type=Path) +parser.add_argument("language-code", default="pt-orig", help="See the list by running `yt-dlp --list-subs `") +args = parser.parse_args() + +if not args.api_key: + import sys + + print("ERROR: API key must be provided either by `--api-key` or `YOUTUBE_API_KEY` environment variable", file=sys.stderr) + exit(1) +api_keys = [key.strip() for key in args.api_key.split(",") if key.strip()] + + +username = args.username +if username.startswith("https://"): + channel_url = username + username = [item for item in username.split("/") if item][-1] +else: + channel_url = f"https://www.youtube.com/@{username}" +data_path = args.data_path +channel_csv_filename = data_path / f"{username}-channel.csv" +playlist_csv_filename = data_path / f"{username}-playlist.csv" +playlist_video_csv_filename = data_path / f"{username}-playlist-video.csv" +video_csv_filename = data_path / f"{username}-video.csv" +comment_csv_filename = data_path / f"{username}-comment.csv" +livechat_csv_filename = data_path / f"username}-livechat.csv" +language_code = args.language_code +video_transcription_path = data_path / Path(f"{username}-transcriptions") + +yt = YouTube(api_keys, disable_ipv6=True) +video_transcription_path.mkdir(parents=True, exist_ok=True) +channel_writer = CsvLazyDictWriter(channel_csv_filename) +playlist_writer = CsvLazyDictWriter(playlist_csv_filename) +video_writer = CsvLazyDictWriter(video_csv_filename) +comment_writer = CsvLazyDictWriter(comment_csv_filename) +livechat_writer = CsvLazyDictWriter(livechat_csv_filename) +playlist_video_writer = CsvLazyDictWriter(playlist_video_csv_filename) + +print("Retrieving channel info") +channel_id = yt.channel_id_from_url(channel_url) +channel_info = list(yt.channels_infos([channel_id]))[0] +channel_writer.writerow(channel_info) +channel_writer.close() + +main_playlist = { + "id": channel_info["playlist_id"], + "title": "Uploads", + "description": channel_info["description"], + "videos": channel_info["videos"], + "channel_id": channel_id, + "channel_title": channel_info["title"], + "published_at": channel_info["published_at"], + "thumbnail_url": channel_info["thumbnail_url"], +} +playlist_writer.writerow(main_playlist) +playlist_ids = [channel_info["playlist_id"]] +for playlist in tqdm(yt.channel_playlists(channel_id), desc="Retrieving channel playlists"): + playlist_writer.writerow(playlist) + playlist_ids.append(playlist["id"]) +playlist_writer.close() + +video_ids = [] +for playlist_id in tqdm(playlist_ids, desc="Retrieving playlists' videos"): + for video in yt.playlist_videos(playlist_id): + if video["id"] not in video_ids: + video_ids.append(video["id"]) + row = { + "playlist_id": playlist_id, + "video_id": video["id"], + "video_status": video["status"], + "channel_id": 
video["channel_id"], + "channel_title": video["channel_title"], + "playlist_channel_id": video["playlist_channel_id"], + "playlist_channel_title": video["playlist_channel_title"], + "title": video["title"], + "description": video["description"], + "published_at": video["published_at"], + "added_to_playlist_at": video["added_to_playlist_at"], + "tags": video["tags"], + } + playlist_video_writer.writerow(row) +playlist_video_writer.close() + +videos = [] +for video in tqdm(yt.videos_infos(video_ids), desc="Retrieving detailed video information"): + videos.append(video) + video_writer.writerow(video) +video_writer.close() + +for video_id in tqdm(video_ids, desc="Retrieving video comments"): + try: + for comment in yt.video_comments(video_id): + comment_writer.writerow(comment) + except RuntimeError: # Comments disabled + continue +comment_writer.close() + +print("Retrieving transcriptions") +yt.videos_transcriptions( + video_ids, + language_code=language_code, + path=video_transcription_path, + skip_downloaded=True, + batch_size=10, +) + +# TODO: live chat code will freeze if it's not available +for video_id in tqdm(video_ids, desc="Retrieving live chat"): + try: + for comment in yt.video_livechat(video_id): + livechat_writer.writerow(comment) + except (LoginRequired, NoChatReplay, ChatDisabled): + continue +livechat_writer.close() diff --git a/scripts/clean_vtt.py b/scripts/clean_vtt.py new file mode 100644 index 0000000..3412b59 --- /dev/null +++ b/scripts/clean_vtt.py @@ -0,0 +1,43 @@ +# pip install webvtt-py +import argparse +import io +import json +import os +import shelve +import time +from pathlib import Path + +import tiktoken +import webvtt +from openai import APITimeoutError, OpenAI +from rows.utils import CsvLazyDictWriter +from tqdm import tqdm + + +def vtt_clean(vtt_content, same_line=False): + result_lines, last_line = [], None + for caption in webvtt.read_buffer(io.StringIO(vtt_content)): + new_lines = caption.text.strip().splitlines() + for line in new_lines: + line = line.strip() + if not line or line == last_line: + continue + result_lines.append(f"{str(caption.start).split('.')[0]} {line}\n" if not same_line else f"{line} ") + last_line = line + return "".join(result_lines) + + +parser = argparse.ArgumentParser() +parser.add_argument("input_path", type=Path) +parser.add_argument("output_path", type=Path) +args = parser.parse_args() + +for filename in tqdm(args.input_path.glob("*.vtt")): + new_filename = args.output_path / filename.name + if new_filename.exists(): + continue + with filename.open() as fobj: + data = fobj.read() + result = vtt_clean(data) + with new_filename.open(mode="w") as fobj: + fobj.write(result) From 079e5ee2d94a781583067680c0c3f82a49cdb062 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Tue, 25 Jun 2024 11:14:23 -0300 Subject: [PATCH 005/115] - Add argparse integration and command handling for Youtube CLI Tool --- youtool/cli.py | 179 ++++++++++--------------------------------------- 1 file changed, 37 insertions(+), 142 deletions(-) diff --git a/youtool/cli.py b/youtool/cli.py index ff675a3..6926185 100644 --- a/youtool/cli.py +++ b/youtool/cli.py @@ -1,158 +1,53 @@ import argparse import os -import sys +from commands import COMMANDS -def main(): - parser = argparse.ArgumentParser() - parser.add_argument("--api-key") - subparsers = parser.add_subparsers(required=True, dest="command") - - api_key = args.api_key or os.environ.get("YOUTUBE_API_KEY") - - cmd_channel_id = subparsers.add_parser("channel-id", help="Get channel IDs from a list of URLs (or 
CSV filename with URLs inside), generate CSV output (just the IDs)") - cmd_channel_info = subparsers.add_parser("channel-info", help="Get channel info from a list of IDs (or CSV filename with IDs inside), generate CSV output (same schema for `channel` dicts)") - cmd_video_info = subparsers.add_parser("video-info", help="Get video info from a list of IDs or URLs (or CSV filename with URLs/IDs inside), generate CSV output (same schema for `video` dicts)") - cmd_video_search = subparsers.add_parser("video-search", help="Get video info from a list of IDs or URLs (or CSV filename with URLs/IDs inside), generate CSV output (simplified `video` dict schema or option to get full video info after)") - cmd_video_comments = subparsers.add_parser("video-comments", help="Get comments from a video ID, generate CSV output (same schema for `comment` dicts)") - cmd_video_livechat = subparsers.add_parser("video-livechat", help="Get comments from a video ID, generate CSV output (same schema for `chat_message` dicts)") - cmd_video_transcriptions = subparsers.add_parser("video-transcription", help="Download video transcriptions based on language code, path and list of video IDs or URLs (or CSV filename with URLs/IDs inside), download files to destination and report results") - - args = parser.parse_args() - - if args.command == "channel-id": - print(f"Implement: {args.command}") # TODO: implement - - elif args.command == "channel-info": - print(f"Implement: {args.command}") # TODO: implement - - elif args.command == "video-info": - print(f"Implement: {args.command}") # TODO: implement - elif args.command == "video-search": - print(f"Implement: {args.command}") # TODO: implement - exit(1) - - # TODO: update code below based on new YouTube class API - import rows - from loguru import logger - from tqdm import tqdm - - from youtool import YouTube - - parser = argparse.ArgumentParser() - parser.add_argument("--key") - parser.add_argument("csv_filename") - parser.add_argument("url", nargs="+") - args = parser.parse_args() - - key = args.key or os.environ.get("YOUTUBE_API_KEY") - if not key: - print("ERROR: Must provide an API key (--key or YOUTUBE_API_KEY env var)", file=sys.stderr) - exit(1) - - if not Path(args.csv_filename).parent.exists(): - Path(args.csv_filename).parent.mkdir(parents=True) - writer = rows.utils.CsvLazyDictWriter(args.csv_filename) # TODO: use csv - yt = YouTube(key) - videos_urls = [] - channels = {} - for url in tqdm(args.url, desc="Retrieving channel IDs"): - url = url.strip() - if "/watch?" 
in url: - videos_urls.append(url) - continue - channel_id = yt.channel_id_from_url(url) - if not channel_id: - username = url.split("youtube.com/")[1].split("?")[0].split("/")[0] - logger.warning(f"Channel ID not found for URL {url}") - continue - channels[channel_id] = { - "id": channel_id, - "url": url, - } - for channel_id, playlist_id in yt.playlists_ids(list(channels.keys())).items(): - channels[channel_id]["playlist_id"] = playlist_id - fields = "id duration definition status views likes dislikes favorites comments channel_id title description published_at scheduled_to finished_at concurrent_viewers started_at".split() - # TODO: check fields - for data in tqdm(channels.values(), desc="Retrieving videos"): - try: - for video_batch in ipartition(yt.playlist_videos(data["playlist_id"]), 50): - for video in yt.videos_infos([row["id"] for row in video_batch]): - writer.writerow({field: video.get(field) for field in fields}) - except RuntimeError: # Cannot find playlist - continue - videos_ids = (video_url.split("watch?v=")[1].split("&")[0] for video_url in videos_urls) - for video in tqdm(yt.videos_infos(videos_ids), desc="Retrieving individual videos"): - writer.writerow({field: video.get(field) for field in fields}) - writer.close() - - # SEARCH - now = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc) - timezone_br = datetime.timezone(offset=datetime.timedelta(hours=-3)) - now_br = now.astimezone(timezone_br) - search_start = (now - datetime.timedelta(hours=1)).replace(minute=0, second=0, microsecond=0) - search_stop = search_start + datetime.timedelta(hours=1) +def main(): + """ + Main function for the YouTube CLI Tool. - parent = Path(__file__).parent - parser = argparse.ArgumentParser() - parser.add_argument("--keys-filename", default=parent / "youtube-keys.csv") - parser.add_argument("--terms-filename", default=parent / "search-terms.csv") - parser.add_argument("--channels-filename", default=parent / "search-channels.csv") - parser.add_argument("--start", default=str(search_start)) - parser.add_argument("--stop", default=str(search_stop)) - parser.add_argument("--limit", type=int, default=20) - parser.add_argument("--order", default="viewCount") - parser.add_argument("data_path") - args = parser.parse_args() + This function sets up the argument parser for the CLI tool, including options for the YouTube API key and + command-specific subparsers. It then parses the command-line arguments, retrieving the YouTube API key + from either the command-line argument '--api-key' or the environment variable 'YOUTUBE_API_KEY'. If the API + key is not provided through any means, it raises an argparse.ArgumentError. 
- data_path = Path(args.data_path) - keys_filename = Path(args.keys_filename) - terms_filename = Path(args.terms_filename) - channels_filename = Path(args.channels_filename) - now_path_name = now_br.strftime("%Y-%m-%dT%H") - youtube_keys = read_keys(keys_filename) - channels_groups = read_channels(args.channels_filename) - search_start, search_stop = args.start, args.stop - if isinstance(search_start, str): - search_start = datetime.datetime.fromisoformat(search_start) - if isinstance(search_stop, str): - search_stop = datetime.datetime.fromisoformat(search_stop) - search_start_str = search_start.astimezone(datetime.timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ") - search_stop_str = search_stop.astimezone(datetime.timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ") - search_limit = args.limit - search_order = args.order - terms_categories = read_csv_dictlist(terms_filename, "categoria", "termo") + Finally, the function executes the appropriate command based on the parsed arguments. If an exception occurs + during the execution of the command, it is caught and raised as an argparse error for proper handling. - print(search_start_str) - print(search_stop_str) + Raises: + argparse.ArgumentError: If the YouTube API key is not provided. + argparse.ArgumentError: If there is an error during the execution of the command. - search_start_br = search_start.astimezone(timezone_br) - result_filename = data_path / f"search_{search_start_br.strftime('%Y-%m-%dT%H')}.csv" - writer = rows.utils.CsvLazyDictWriter(result_filename) - search_results = youtube_search( - terms_categories=terms_categories, - keys=youtube_keys["search"], - start=search_start_str, - stop=search_stop_str, - channels_groups=channels_groups, - order=search_order, - limit=search_limit, - ) - for result in search_results: - writer.writerow(result) - writer.close() + """ + parser = argparse.ArgumentParser(description="CLI Tool for managing YouTube videos add playlists") + parser.add_argument("--api-key", type=str, help="YouTube API Key", dest="api_key") + parser.add_argument("--debug", type=bool, help="Debug mode", dest="debug") + + subparsers = parser.add_subparsers(required=True, dest="command", title="Command", help="Command to be executed") + # cmd_video_search = subparsers.add_parser("video-search", help="Get video info from a list of IDs or URLs (or CSV filename with URLs/IDs inside), generate CSV output (simplified `video` dict schema or option to get full video info after)") + # cmd_video_comments = subparsers.add_parser("video-comments", help="Get comments from a video ID, generate CSV output (same schema for `comment` dicts)") + # cmd_video_livechat = subparsers.add_parser("video-livechat", help="Get comments from a video ID, generate CSV output (same schema for `chat_message` dicts)") + # cmd_video_transcriptions = subparsers.add_parser("video-transcription", help="Download video transcriptions based on language code, path and list of video IDs or URLs (or CSV filename with URLs/IDs inside), download files to destination and report results") - elif args.command == "video-comments": - print(f"Implement: {args.command}") # TODO: implement + for command in COMMANDS: + command.parse_arguments(subparsers) - elif args.command == "video-livechat": - print(f"Implement: {args.command}") # TODO: implement + args = parser.parse_args() + args.api_key = args.api_key or os.environ.get("YOUTUBE_API_KEY") - elif args.command == "video-transcription": - print(f"Implement: {args.command}") # TODO: implement + if not args.api_key: + parser.error("YouTube 
API Key is required") + + try: + print(args.func(**args.__dict__)) + except Exception as error: + if args.debug: + raise error + parser.error(error) if __name__ == "__main__": - main() + main() \ No newline at end of file From 4c5d15124a2f54ed56ba13e7d54ee962b7769881 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Tue, 25 Jun 2024 11:16:11 -0300 Subject: [PATCH 006/115] - Implemented method to extract URLs from a CSV file; - Implemented method to convert a list of dictionaries into a CSV file or string; --- youtool/commands/base.py | 115 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 115 insertions(+) create mode 100644 youtool/commands/base.py diff --git a/youtool/commands/base.py b/youtool/commands/base.py new file mode 100644 index 0000000..165a48f --- /dev/null +++ b/youtool/commands/base.py @@ -0,0 +1,115 @@ +import csv +import argparse + +from typing import List, Dict, Any, Self +from io import StringIO +from pathlib import Path +from datetime import datetime + + +class Command(): + """ + A base class for commands to inherit from, following a specific structure. + + Attributes: + name (str): The name of the command. + arguments (List[Dict[str, Any]]): A list of dictionaries, each representing an argument for the command. + """ + name: str + arguments: List[Dict[str, Any]] + + @classmethod + def generate_parser(cls: Self, subparsers: argparse._SubParsersAction): + """ + Creates a parser for the command and adds it to the subparsers. + + Args: + subparsers (argparse._SubParsersAction): The subparsers action to add the parser to. + + Returns: + argparse.ArgumentParser: The parser for the command. + """ + return subparsers.add_parser(cls.name, help=cls.__doc__) + + @classmethod + def parse_arguments(cls: Self, subparsers: argparse._SubParsersAction) -> None: + """ + Parses the arguments for the command and sets the command's execute method as the default function to call. + + Args: + subparsers (argparse._SubParsersAction): The subparsers action to add the parser to. + """ + parser = cls.generate_parser(subparsers) + for argument in cls.arguments: + argument_copy = {**argument} + argument_name = argument_copy.pop("name") + parser.add_argument(argument_name, **argument_copy) + parser.set_defaults(func=cls.execute) + + @classmethod + def execute(cls: Self, arguments: argparse.Namespace): + """ + Executes the command. + + This method should be overridden by subclasses to define the command's behavior. + + Args: + arguments (argparse.Namespace): The parsed arguments for the command. + """ + raise NotImplementedError() + + @staticmethod + def data_from_csv(file_path: str, data_column_name: str = None) -> List[str]: + """ + Extracts a list of URLs from a specified CSV file. + + Args: file_path (str): The path to the CSV file containing the URLs. + data_column_name (str, optional): The name of the column in the CSV file that contains the URLs. + If not provided, it defaults to `ChannelId.URL_COLUMN_NAME`. + + Returns: + List[str]: A list of URLs extracted from the specified CSV file. + + Raises: + Exception: If the file path is invalid or the file cannot be found. 
+ """ + data = [] + + file_path = Path(file_path) + if not file_path.is_file(): + raise FileNotFoundError(f"Invalid file path: {file_path}") + + with file_path.open('r', newline='') as csv_file: + reader = csv.DictReader(csv_file) + if data_column_name not in reader.fieldnames: + raise Exception(f"Column {data_column_name} not found on {file_path}") + for row in reader: + data.append(row.get(data_column_name)) + return data + + @classmethod + def data_to_csv(cls: Self, data: List[Dict], output_file_path: str = None) -> str: + """ + Converts a list of channel IDs into a CSV file. + + Parameters: + channels_ids (List[str]): List of channel IDs to be written to the CSV. + output_file_path (str, optional): Path to the file where the CSV will be saved. If not provided, the CSV will be returned as a string. + channel_id_column_name (str, optional): Name of the column in the CSV that will contain the channel IDs. + If not provided, the default value defined in ChannelId.CHANNEL_ID_COLUMN_NAME will be used. + + Returns: + str: The path of the created CSV file or, if no path is provided, the contents of the CSV as a string. + """ + if output_file_path: + output_path = Path(output_file_path) + if output_path.is_dir(): + command_name = cls.name.replace("-", "_") + timestamp = datetime.now().strftime("%M%S%f") + output_file_path = output_path / f"{command_name}_{timestamp}.csv" + + with (Path(output_file_path).open('w', newline='') if output_file_path else StringIO()) as csv_file: + writer = csv.DictWriter(csv_file, fieldnames=list(data[0].keys()) if data else []) + writer.writeheader() + writer.writerows(data) + return str(output_file_path) if output_file_path else csv_file.getvalue() \ No newline at end of file From 943f6b07b3da175e42366e0b05270adb021f0eac Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Tue, 25 Jun 2024 11:20:00 -0300 Subject: [PATCH 007/115] - Implemented command to extract YouTube channel IDs from a list of URLs or a CSV file containing URLs; - Added commands directory structure --- youtool/commands/channel_id.py | 85 ++++++++++++++++++++++++++++++++++ 1 file changed, 85 insertions(+) create mode 100644 youtool/commands/channel_id.py diff --git a/youtool/commands/channel_id.py b/youtool/commands/channel_id.py new file mode 100644 index 0000000..2233d33 --- /dev/null +++ b/youtool/commands/channel_id.py @@ -0,0 +1,85 @@ +import csv + +from typing import Self + +from youtool import YouTube + +from .base import Command + + +class ChannelId(Command): + """ + Get channel IDs from a list of URLs (or CSV filename with URLs inside), generate CSV output (just the IDs) + """ + name = "channel-id" + arguments = [ + {"name": "--urls", "type": str, "help": "Channels urls", "nargs": "*"}, + {"name": "--urls-file-path", "type": str, "help": "Channels urls csv file path"}, + {"name": "--output-file-path", "type": str, "help": "Output csv file path"}, + {"name": "--url-column-name", "type": str, "help": "URL column name on csv input files"}, + {"name": "--id-column-name", "type": str, "help": "Channel ID column name on csv output files"} + ] + + URL_COLUMN_NAME: str = "channel_url" + CHANNEL_ID_COLUMN_NAME: str = "channel_id" + + @classmethod + def execute(cls: Self, **kwargs) -> str: + """ + Execute the channel-id command to fetch YouTube channel IDs from URLs and save them to a CSV file. + + This method retrieves YouTube channel IDs from a list of provided URLs or from a file containing URLs. + It then saves these channel IDs to a CSV file if an output file path is specified. 
+ + Args: + urls (list[str], optional): A list of YouTube channel URLs. Either this or urls_file_path must be provided. + urls_file_path (str, optional): Path to a CSV file containing YouTube channel URLs. + Requires url_column_name to specify the column with URLs. + output_file_path (str, optional): Path to the output CSV file where channel IDs will be saved. + If not provided, the result will be returned as a string. + api_key (str): The API key to authenticate with the YouTube Data API. + url_column_name (str, optional): The name of the column in the urls_file_path CSV file that contains the URLs. + Default is "url". + id_column_name (str, optional): The name of the column for channel IDs in the output CSV file. + Default is "channel_id". + + Returns: + str: A message indicating the result of the command. If output_file_path is specified, the message will + include the path to the generated CSV file. Otherwise, it will return the result as a string. + + Raises: + Exception: If neither urls nor urls_file_path is provided. + """ + urls = kwargs.get("urls") + urls_file_path = kwargs.get("urls_file_path") + output_file_path = kwargs.get("output_file_path") + api_key = kwargs.get("api_key") + + url_column_name = kwargs.get("url_column_name") + id_column_name = kwargs.get("id_column_name") + + if urls_file_path and not urls: + urls = cls.data_from_csv( + file_path=urls_file_path, + data_column_name=url_column_name or cls.URL_COLUMN_NAME + ) + + if not urls: + raise Exception("Either 'username' or 'url' must be provided for the channel-id command") + + youtube = YouTube([api_key], disable_ipv6=True) + + channels_ids = [ + youtube.channel_id_from_url(url) for url in urls if url + ] + + result = cls.data_to_csv( + data=[ + { + (id_column_name or cls.CHANNEL_ID_COLUMN_NAME): channel_id for channel_id in channels_ids + } + ], + output_file_path=output_file_path + ) + + return result \ No newline at end of file From b4f82e5bd4e03da73274175c544dcb91a41e6ef4 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Tue, 25 Jun 2024 11:22:28 -0300 Subject: [PATCH 008/115] - Added to the list; --- youtool/commands/__init__.py | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 youtool/commands/__init__.py diff --git a/youtool/commands/__init__.py b/youtool/commands/__init__.py new file mode 100644 index 0000000..9d1c702 --- /dev/null +++ b/youtool/commands/__init__.py @@ -0,0 +1,10 @@ +from .channel_id import ChannelId + + +COMMANDS = [ + ChannelId +] + +__all__ = [ + COMMANDS, ChannelId +] \ No newline at end of file From 525015e5d7eb332efdf876f3da4a0b28ba4d9f8f Mon Sep 17 00:00:00 2001 From: Ana Paula Sales Date: Wed, 26 Jun 2024 16:42:07 -0300 Subject: [PATCH 009/115] Update cli.py fix: show error with parser if not in debug mode --- youtool/cli.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/youtool/cli.py b/youtool/cli.py index 6926185..dce4356 100644 --- a/youtool/cli.py +++ b/youtool/cli.py @@ -46,8 +46,8 @@ def main(): except Exception as error: if args.debug: raise error - parser.error(error) + parser.error(error) if __name__ == "__main__": - main() \ No newline at end of file + main() From 4fba6d47b303428b8415c557d3f4c854bfaccdde Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 27 Jun 2024 18:40:48 -0300 Subject: [PATCH 010/115] - Removed the type annotation from the method; - Changed file path passing to use from in the method; --- youtool/commands/base.py | 29 +++++++++++++++++------------ youtool/commands/channel_id.py | 6 +++--- 2 files changed, 20 
insertions(+), 15 deletions(-) diff --git a/youtool/commands/base.py b/youtool/commands/base.py index 165a48f..81deb61 100644 --- a/youtool/commands/base.py +++ b/youtool/commands/base.py @@ -1,7 +1,7 @@ import csv import argparse -from typing import List, Dict, Any, Self +from typing import List, Dict, Any, Optional from io import StringIO from pathlib import Path from datetime import datetime @@ -19,7 +19,7 @@ class Command(): arguments: List[Dict[str, Any]] @classmethod - def generate_parser(cls: Self, subparsers: argparse._SubParsersAction): + def generate_parser(cls, subparsers: argparse._SubParsersAction): """ Creates a parser for the command and adds it to the subparsers. @@ -32,7 +32,7 @@ def generate_parser(cls: Self, subparsers: argparse._SubParsersAction): return subparsers.add_parser(cls.name, help=cls.__doc__) @classmethod - def parse_arguments(cls: Self, subparsers: argparse._SubParsersAction) -> None: + def parse_arguments(cls, subparsers: argparse._SubParsersAction) -> None: """ Parses the arguments for the command and sets the command's execute method as the default function to call. @@ -47,7 +47,7 @@ def parse_arguments(cls: Self, subparsers: argparse._SubParsersAction) -> None: parser.set_defaults(func=cls.execute) @classmethod - def execute(cls: Self, arguments: argparse.Namespace): + def execute(cls, arguments: argparse.Namespace): """ Executes the command. @@ -59,36 +59,41 @@ def execute(cls: Self, arguments: argparse.Namespace): raise NotImplementedError() @staticmethod - def data_from_csv(file_path: str, data_column_name: str = None) -> List[str]: + def data_from_csv(file_path: Path, data_column_name: Optional[str] = None) -> List[str]: """ Extracts a list of URLs from a specified CSV file. - Args: file_path (str): The path to the CSV file containing the URLs. - data_column_name (str, optional): The name of the column in the CSV file that contains the URLs. - If not provided, it defaults to `ChannelId.URL_COLUMN_NAME`. + Args: + file_path: The path to the CSV file containing the URLs. + data_column_name: The name of the column in the CSV file that contains the URLs. + If not provided, it defaults to `ChannelId.URL_COLUMN_NAME`. Returns: - List[str]: A list of URLs extracted from the specified CSV file. + A list of URLs extracted from the specified CSV file. Raises: Exception: If the file path is invalid or the file cannot be found. """ data = [] - file_path = Path(file_path) if not file_path.is_file(): raise FileNotFoundError(f"Invalid file path: {file_path}") with file_path.open('r', newline='') as csv_file: reader = csv.DictReader(csv_file) - if data_column_name not in reader.fieldnames: + fieldnames = reader.fieldnames + + if fieldnames is None: + raise ValueError("Fieldnames is None") + + if data_column_name not in fieldnames: raise Exception(f"Column {data_column_name} not found on {file_path}") for row in reader: data.append(row.get(data_column_name)) return data @classmethod - def data_to_csv(cls: Self, data: List[Dict], output_file_path: str = None) -> str: + def data_to_csv(cls, data: List[Dict], output_file_path: Optional[str] = None) -> str: """ Converts a list of channel IDs into a CSV file. 
diff --git a/youtool/commands/channel_id.py b/youtool/commands/channel_id.py index 2233d33..c648342 100644 --- a/youtool/commands/channel_id.py +++ b/youtool/commands/channel_id.py @@ -1,6 +1,6 @@ import csv -from typing import Self +from pathlib import Path from youtool import YouTube @@ -24,7 +24,7 @@ class ChannelId(Command): CHANNEL_ID_COLUMN_NAME: str = "channel_id" @classmethod - def execute(cls: Self, **kwargs) -> str: + def execute(cls, **kwargs) -> str: """ Execute the channel-id command to fetch YouTube channel IDs from URLs and save them to a CSV file. @@ -60,7 +60,7 @@ def execute(cls: Self, **kwargs) -> str: if urls_file_path and not urls: urls = cls.data_from_csv( - file_path=urls_file_path, + file_path=Path(urls_file_path), data_column_name=url_column_name or cls.URL_COLUMN_NAME ) From 2ba79df4234e90572c289ceb660a85f6bb980138 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 27 Jun 2024 19:22:31 -0300 Subject: [PATCH 011/115] - Add changed the method signature in the class to accept (**kwargs) and return a string; - Added logic to convert values retrieved from the CSV file to strings before appending them to the data list; --- youtool/commands/base.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/youtool/commands/base.py b/youtool/commands/base.py index 81deb61..6c2ddb0 100644 --- a/youtool/commands/base.py +++ b/youtool/commands/base.py @@ -47,7 +47,7 @@ def parse_arguments(cls, subparsers: argparse._SubParsersAction) -> None: parser.set_defaults(func=cls.execute) @classmethod - def execute(cls, arguments: argparse.Namespace): + def execute(cls, **kwargs) -> str: """ Executes the command. @@ -89,7 +89,9 @@ def data_from_csv(file_path: Path, data_column_name: Optional[str] = None) -> Li if data_column_name not in fieldnames: raise Exception(f"Column {data_column_name} not found on {file_path}") for row in reader: - data.append(row.get(data_column_name)) + value = row.get(data_column_name) + if value is not None: + data.append(str(value)) return data @classmethod From 8ab5185e82d4b460bc70b0807e527e9d78447d30 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 27 Jun 2024 21:16:04 -0300 Subject: [PATCH 012/115] - Fixed typing error in all in the file. --- youtool/commands/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/youtool/commands/__init__.py b/youtool/commands/__init__.py index 9d1c702..985024e 100644 --- a/youtool/commands/__init__.py +++ b/youtool/commands/__init__.py @@ -6,5 +6,5 @@ ] __all__ = [ - COMMANDS, ChannelId + "COMMANDS", "ChannelId" ] \ No newline at end of file From 6b283205a6bde8cb04d63725da41dc8dba7f6af1 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 27 Jun 2024 22:21:05 -0300 Subject: [PATCH 013/115] Add updates docstrings --- youtool/cli.py | 4 +--- youtool/commands/base.py | 24 +++++++++--------------- youtool/commands/channel_id.py | 10 +++------- 3 files changed, 13 insertions(+), 25 deletions(-) diff --git a/youtool/cli.py b/youtool/cli.py index dce4356..7875342 100644 --- a/youtool/cli.py +++ b/youtool/cli.py @@ -5,8 +5,7 @@ def main(): - """ - Main function for the YouTube CLI Tool. + """Main function for the YouTube CLI Tool. This function sets up the argument parser for the CLI tool, including options for the YouTube API key and command-specific subparsers. It then parses the command-line arguments, retrieving the YouTube API key @@ -19,7 +18,6 @@ def main(): Raises: argparse.ArgumentError: If the YouTube API key is not provided. 
argparse.ArgumentError: If there is an error during the execution of the command. - """ parser = argparse.ArgumentParser(description="CLI Tool for managing YouTube videos add playlists") parser.add_argument("--api-key", type=str, help="YouTube API Key", dest="api_key") diff --git a/youtool/commands/base.py b/youtool/commands/base.py index 6c2ddb0..5598afd 100644 --- a/youtool/commands/base.py +++ b/youtool/commands/base.py @@ -7,9 +7,8 @@ from datetime import datetime -class Command(): - """ - A base class for commands to inherit from, following a specific structure. +class Command: + """A base class for commands to inherit from, following a specific structure. Attributes: name (str): The name of the command. @@ -20,8 +19,7 @@ class Command(): @classmethod def generate_parser(cls, subparsers: argparse._SubParsersAction): - """ - Creates a parser for the command and adds it to the subparsers. + """Creates a parser for the command and adds it to the subparsers. Args: subparsers (argparse._SubParsersAction): The subparsers action to add the parser to. @@ -33,8 +31,7 @@ def generate_parser(cls, subparsers: argparse._SubParsersAction): @classmethod def parse_arguments(cls, subparsers: argparse._SubParsersAction) -> None: - """ - Parses the arguments for the command and sets the command's execute method as the default function to call. + """Parses the arguments for the command and sets the command's execute method as the default function to call. Args: subparsers (argparse._SubParsersAction): The subparsers action to add the parser to. @@ -47,9 +44,8 @@ def parse_arguments(cls, subparsers: argparse._SubParsersAction) -> None: parser.set_defaults(func=cls.execute) @classmethod - def execute(cls, **kwargs) -> str: - """ - Executes the command. + def execute(cls, **kwargs) -> str: # noqa: D417 + """Executes the command. This method should be overridden by subclasses to define the command's behavior. @@ -60,8 +56,7 @@ def execute(cls, **kwargs) -> str: @staticmethod def data_from_csv(file_path: Path, data_column_name: Optional[str] = None) -> List[str]: - """ - Extracts a list of URLs from a specified CSV file. + """Extracts a list of URLs from a specified CSV file. Args: file_path: The path to the CSV file containing the URLs. @@ -96,13 +91,12 @@ def data_from_csv(file_path: Path, data_column_name: Optional[str] = None) -> Li @classmethod def data_to_csv(cls, data: List[Dict], output_file_path: Optional[str] = None) -> str: - """ - Converts a list of channel IDs into a CSV file. + """Converts a list of channel IDs into a CSV file. Parameters: channels_ids (List[str]): List of channel IDs to be written to the CSV. output_file_path (str, optional): Path to the file where the CSV will be saved. If not provided, the CSV will be returned as a string. - channel_id_column_name (str, optional): Name of the column in the CSV that will contain the channel IDs. + channel_id_column_name (str, optional): Name of the column in the CSV that will contain the channel IDs. If not provided, the default value defined in ChannelId.CHANNEL_ID_COLUMN_NAME will be used. 
Returns: diff --git a/youtool/commands/channel_id.py b/youtool/commands/channel_id.py index c648342..8e1d004 100644 --- a/youtool/commands/channel_id.py +++ b/youtool/commands/channel_id.py @@ -1,4 +1,3 @@ -import csv from pathlib import Path @@ -8,9 +7,7 @@ class ChannelId(Command): - """ - Get channel IDs from a list of URLs (or CSV filename with URLs inside), generate CSV output (just the IDs) - """ + """Get channel IDs from a list of URLs (or CSV filename with URLs inside), generate CSV output (just the IDs).""" name = "channel-id" arguments = [ {"name": "--urls", "type": str, "help": "Channels urls", "nargs": "*"}, @@ -24,9 +21,8 @@ class ChannelId(Command): CHANNEL_ID_COLUMN_NAME: str = "channel_id" @classmethod - def execute(cls, **kwargs) -> str: - """ - Execute the channel-id command to fetch YouTube channel IDs from URLs and save them to a CSV file. + def execute(cls, **kwargs) -> str: # noqa: D417 + """Execute the channel-id command to fetch YouTube channel IDs from URLs and save them to a CSV file. This method retrieves YouTube channel IDs from a list of provided URLs or from a file containing URLs. It then saves these channel IDs to a CSV file if an output file path is specified. From dfc2011450d48e18effe62f2338947ad72944e8c Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Tue, 2 Jul 2024 00:52:05 -0300 Subject: [PATCH 014/115] Update import --- youtool/cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/youtool/cli.py b/youtool/cli.py index 7875342..961d2e6 100644 --- a/youtool/cli.py +++ b/youtool/cli.py @@ -1,7 +1,7 @@ import argparse import os -from commands import COMMANDS +from youtool.commands import COMMANDS def main(): From b1b33670fdebed9e5418ea9ea1824547f25b302a Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Tue, 2 Jul 2024 00:53:41 -0300 Subject: [PATCH 015/115] Add update command into the file --- youtool/commands/__init__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/youtool/commands/__init__.py b/youtool/commands/__init__.py index 985024e..89bbc09 100644 --- a/youtool/commands/__init__.py +++ b/youtool/commands/__init__.py @@ -1,10 +1,10 @@ +from .base import Command from .channel_id import ChannelId - COMMANDS = [ ChannelId ] __all__ = [ - "COMMANDS", "ChannelId" -] \ No newline at end of file + "Command", "COMMANDS", "ChannelId", +] From 28b2574278a16e4b28cf0aeaa88347881f09f2fd Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Tue, 2 Jul 2024 00:55:04 -0300 Subject: [PATCH 016/115] Add update --- youtool/commands/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/youtool/commands/base.py b/youtool/commands/base.py index 5598afd..077c826 100644 --- a/youtool/commands/base.py +++ b/youtool/commands/base.py @@ -113,4 +113,4 @@ def data_to_csv(cls, data: List[Dict], output_file_path: Optional[str] = None) - writer = csv.DictWriter(csv_file, fieldnames=list(data[0].keys()) if data else []) writer.writeheader() writer.writerows(data) - return str(output_file_path) if output_file_path else csv_file.getvalue() \ No newline at end of file + return str(output_file_path) if output_file_path else csv_file.getvalue() From fe180fb7efa5b1663ce413a816332ec7231a58a0 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Tue, 2 Jul 2024 00:56:19 -0300 Subject: [PATCH 017/115] Add improvements to the file --- youtool/commands/channel_id.py | 27 ++++++++++++++++----------- 1 file changed, 16 insertions(+), 11 deletions(-) diff --git a/youtool/commands/channel_id.py b/youtool/commands/channel_id.py index 
8e1d004..d42f311 100644 --- a/youtool/commands/channel_id.py +++ b/youtool/commands/channel_id.py @@ -54,14 +54,7 @@ def execute(cls, **kwargs) -> str: # noqa: D417 url_column_name = kwargs.get("url_column_name") id_column_name = kwargs.get("id_column_name") - if urls_file_path and not urls: - urls = cls.data_from_csv( - file_path=Path(urls_file_path), - data_column_name=url_column_name or cls.URL_COLUMN_NAME - ) - - if not urls: - raise Exception("Either 'username' or 'url' must be provided for the channel-id command") + urls = cls.resolve_urls(urls, urls_file_path, url_column_name) youtube = YouTube([api_key], disable_ipv6=True) @@ -72,10 +65,22 @@ def execute(cls, **kwargs) -> str: # noqa: D417 result = cls.data_to_csv( data=[ { - (id_column_name or cls.CHANNEL_ID_COLUMN_NAME): channel_id for channel_id in channels_ids - } + (id_column_name or cls.CHANNEL_ID_COLUMN_NAME): channel_id + } for channel_id in channels_ids ], output_file_path=output_file_path ) - return result \ No newline at end of file + return result + + @classmethod + def resolve_urls(cls, urls, urls_file_path, url_column_name): + if urls_file_path and not urls: + urls = cls.data_from_csv( + file_path=Path(urls_file_path), + data_column_name=url_column_name or cls.URL_COLUMN_NAME + ) + + if not urls: + raise Exception("Either 'username' or 'url' must be provided for the channel-id command") + return urls From d4e66b4209628a8e28c6c8ec43c3f93f3de93a64 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Tue, 2 Jul 2024 00:58:13 -0300 Subject: [PATCH 018/115] Add test for cli file --- tests/test_cli.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 tests/test_cli.py diff --git a/tests/test_cli.py b/tests/test_cli.py new file mode 100644 index 0000000..3a489ee --- /dev/null +++ b/tests/test_cli.py @@ -0,0 +1,20 @@ +import pytest + +from subprocess import run + +from youtool.commands import COMMANDS + +from youtool.commands.base import Command + + +@pytest.mark.parametrize( + "command", COMMANDS +) +def test_missing_api_key(monkeypatch: pytest.MonkeyPatch, command: Command): + monkeypatch.delenv('YOUTUBE_API_KEY', raising=False) + cli_path = "youtool/cli.py" + command = ["python", cli_path, command.name] + result = run(command, capture_output=True, text=True, check=False) + + assert result.returncode == 2 + assert "YouTube API Key is required" in result.stderr \ No newline at end of file From 4bf29ff4e4c6f7ab8143d2a424cac2b972b669b9 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Tue, 2 Jul 2024 00:59:00 -0300 Subject: [PATCH 019/115] Add test for base file --- tests/commands/__init__.py | 0 tests/commands/test_base.py | 127 ++++++++++++++++++++++++++++++++++++ 2 files changed, 127 insertions(+) create mode 100644 tests/commands/__init__.py create mode 100644 tests/commands/test_base.py diff --git a/tests/commands/__init__.py b/tests/commands/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/commands/test_base.py b/tests/commands/test_base.py new file mode 100644 index 0000000..9d3ad90 --- /dev/null +++ b/tests/commands/test_base.py @@ -0,0 +1,127 @@ +import csv +import argparse +import pytest + +from io import StringIO +from datetime import datetime +from pathlib import Path +from unittest.mock import MagicMock, patch, mock_open +from youtool.commands import Command + + +class TestCommand(Command): + name = "command_name" + arguments = [ + {"name": "--test-arg", "help": "Test argument", "default": "default_value", "type": str} + ] + + @classmethod + def execute(cls, 
**kwargs): + return "executed" + +@pytest.fixture +def subparsers(): + parser = argparse.ArgumentParser() + return parser.add_subparsers() + + +def test_generate_parser(subparsers): + parser = TestCommand.generate_parser(subparsers) + + assert parser is not None, "Parser should not be None" + assert isinstance(parser, argparse.ArgumentParser), "Parser should be an instance of argparse.ArgumentParser" + assert parser.prog.endswith(TestCommand.name), f"Parser prog should end with '{TestCommand.name}'" + + +def test_parse_arguments(subparsers): + subparsers_mock = MagicMock(spec=subparsers) + + TestCommand.parse_arguments(subparsers_mock) + + subparsers_mock.add_parser.assert_called_once_with(TestCommand.name, help=TestCommand.__doc__) + parser_mock = subparsers_mock.add_parser.return_value + parser_mock.add_argument.assert_called_once_with("--test-arg", help="Test argument", default="default_value", type=str) + parser_mock.set_defaults.assert_called_once_with(func=TestCommand.execute) + + +def test_command(): + class MyCommand(Command): + pass + + with pytest.raises(NotImplementedError): + MyCommand.execute() + + +@pytest.fixture +def mock_csv_file(): + + csv_content = """URL + http://example.com + http://example2.com + """ + return csv_content + +def test_data_from_csv_valid(mock_csv_file): + with patch('pathlib.Path.is_file', return_value=True): + with patch('builtins.open', mock_open(read_data=mock_csv_file)): + data_column_name = "URL" + file_path = Path("tests/commands/csv_valid.csv") + result = Command.data_from_csv(file_path, data_column_name) + assert len(result) == 2 + assert result[0] == "http://example.com" + assert result[1] == "http://example2.com" + +def test_data_from_csv_file_not_found(): + with patch('pathlib.Path.is_file', return_value=False): + file_path = Path("/fake/path/not_found.csv") + with pytest.raises(FileNotFoundError): + Command.data_from_csv(file_path, "URL") + +def test_data_from_csv_column_not_found(mock_csv_file): + with patch('pathlib.Path.is_file', return_value=True): + with patch('builtins.open', mock_open(read_data=mock_csv_file)): + file_path = Path("tests/commands/csv_column_not_found.csv") + with pytest.raises(Exception) as exc_info: + Command.data_from_csv(file_path, "NonExistentColumn") + assert "Column NonExistentColumn not found on tests/commands/csv_column_not_found.csv" in str(exc_info.value), "Exception message should contain column not found error" + + +@pytest.fixture +def sample_data(): + return [ + {"id": "123", "name": "Channel One"}, + {"id": "456", "name": "Channel Two"} + ] + +def test_data_to_csv_with_output_file_path(tmp_path, sample_data): + output_file_path = tmp_path / "output.csv" + + result_path = Command.data_to_csv(sample_data, str(output_file_path)) + + assert result_path == str(output_file_path), "The returned path should match the provided output file path" + assert output_file_path.exists(), "The output file should exist" + with output_file_path.open('r') as f: + reader = csv.DictReader(f) + rows = list(reader) + assert len(rows) == 2, "There should be two rows in the output CSV" + assert rows[0]["id"] == "123" and rows[1]["id"] == "456", "The IDs should match the sample data" + +def test_data_to_csv_without_output_file_path(sample_data): + csv_content = Command.data_to_csv(sample_data) + + assert "id,name" in csv_content + assert "123,Channel One" in csv_content + assert "456,Channel Two" in csv_content + +def test_data_to_csv_output(tmp_path): + output_file_path = tmp_path / "output.csv" + + data = [ + {"id": 1, "name": 
"Test1"}, + {"id": 2, "name": "Test2"} + ] + + expected_output = "id,name\n1,Test1\n2,Test2\n" + result = Command.data_to_csv(data, str(output_file_path)) + assert Path(output_file_path).is_file() + assert expected_output == Path(output_file_path).read_text() From 216e5f2da8753ca2c00e61d00092e4baeb0e060e Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Tue, 2 Jul 2024 00:59:45 -0300 Subject: [PATCH 020/115] Add test for channel_id command --- tests/commands/test_channel_id.py | 55 +++++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 tests/commands/test_channel_id.py diff --git a/tests/commands/test_channel_id.py b/tests/commands/test_channel_id.py new file mode 100644 index 0000000..56035ee --- /dev/null +++ b/tests/commands/test_channel_id.py @@ -0,0 +1,55 @@ +import csv +import pytest + +from io import StringIO + +from unittest.mock import patch, call +from youtool.commands.channel_id import ChannelId + +@pytest.fixture +def csv_file(tmp_path): + csv_content = "channel_url\nhttps://www.youtube.com/@Turicas/featured\n" + csv_file = tmp_path / "urls.csv" + csv_file.write_text(csv_content) + return csv_file + +@pytest.fixture +def youtube_api_mock(): + with patch("youtool.commands.channel_id.YouTube") as mock: + mock.return_value.channel_id_from_url.side_effect = lambda url: f"channel-{url}" + yield mock + +def test_channels_ids_csv_preparation(youtube_api_mock): + urls = ["https://www.youtube.com/@Turicas/featured", "https://www.youtube.com/c/PythonicCaf%C3%A9"] + api_key = "test_api_key" + id_column_name = "custom_id_column" + expected_result_data = [ + {id_column_name: "channel-https://www.youtube.com/@Turicas/featured"}, + {id_column_name: "channel-https://www.youtube.com/c/PythonicCaf%C3%A9"} + ] + with StringIO() as csv_file: + writer = csv.DictWriter(csv_file, fieldnames=[id_column_name]) + writer.writeheader() + writer.writerows(expected_result_data) + expected_result_csv = csv_file.getvalue() + + result = ChannelId.execute(urls=urls, api_key=api_key, id_column_name=id_column_name) + + youtube_api_mock.return_value.channel_id_from_url.assert_has_calls([call(url) for url in urls], any_order=True) + assert result == expected_result_csv + + +def test_resolve_urls_with_direct_urls(): + # Tests whether the function returns the directly given list of URLs. + urls = ["https://www.youtube.com/@Turicas/featured"] + result = ChannelId.resolve_urls(urls, None, None) + assert result == urls + +def test_resolve_urls_with_file_path(csv_file): + result = ChannelId.resolve_urls(None, csv_file, "channel_url") + assert result == ["https://www.youtube.com/@Turicas/featured"] + +def test_resolve_urls_raises_exception(): + # Tests whether the function throws an exception when neither urls nor urls_file_path are provided. 
+ with pytest.raises(Exception, match="Either 'username' or 'url' must be provided for the channel-id command"): + ChannelId.resolve_urls(None, None, None) From 1b335b7f184de9c0c6b2678d050072de4c6d5d95 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 15:26:20 -0300 Subject: [PATCH 021/115] add docstrings --- tests/commands/test_base.py | 47 +++++++++++++++++++++++++++++++ tests/commands/test_channel_id.py | 29 +++++++++++++++++-- tests/test_cli.py | 5 ++++ 3 files changed, 79 insertions(+), 2 deletions(-) diff --git a/tests/commands/test_base.py b/tests/commands/test_base.py index 9d3ad90..e9265e8 100644 --- a/tests/commands/test_base.py +++ b/tests/commands/test_base.py @@ -21,11 +21,17 @@ def execute(cls, **kwargs): @pytest.fixture def subparsers(): + """Fixture to create subparsers for argument parsing.""" parser = argparse.ArgumentParser() return parser.add_subparsers() def test_generate_parser(subparsers): + """Test to verify the parser generation. + + This test checks if the `generate_parser` method correctly generates a parser + for the command and sets the appropriate properties + """ parser = TestCommand.generate_parser(subparsers) assert parser is not None, "Parser should not be None" @@ -34,6 +40,11 @@ def test_generate_parser(subparsers): def test_parse_arguments(subparsers): + """Test to verify argument parsing. + + This test checks if the `parse_arguments` method correctly adds the command's + arguments to the parser and sets the default function to the command's execute method. + """ subparsers_mock = MagicMock(spec=subparsers) TestCommand.parse_arguments(subparsers_mock) @@ -45,6 +56,11 @@ def test_parse_arguments(subparsers): def test_command(): + """Test to verify that the `execute` method is implemented. + + This test ensures that if a command does not implement the `execute` method, + a `NotImplementedError` is raised. + """ class MyCommand(Command): pass @@ -54,6 +70,7 @@ class MyCommand(Command): @pytest.fixture def mock_csv_file(): + """Fixture to provide mock CSV content for tests.""" csv_content = """URL http://example.com @@ -62,6 +79,14 @@ def mock_csv_file(): return csv_content def test_data_from_csv_valid(mock_csv_file): + """Test to verify reading data from a valid CSV file. + + This test checks if the `data_from_csv` method correctly reads data from a valid CSV file + and returns the expected list of URLs. + + Args: + mock_csv_file (str): The mock CSV file content. + """ with patch('pathlib.Path.is_file', return_value=True): with patch('builtins.open', mock_open(read_data=mock_csv_file)): data_column_name = "URL" @@ -72,6 +97,11 @@ def test_data_from_csv_valid(mock_csv_file): assert result[1] == "http://example2.com" def test_data_from_csv_file_not_found(): + """Test to verify behavior when the specified column is not found in the CSV file. + + This test checks if the `data_from_csv` method raises an exception when the specified + column does not exist in the CSV file. + """ with patch('pathlib.Path.is_file', return_value=False): file_path = Path("/fake/path/not_found.csv") with pytest.raises(FileNotFoundError): @@ -88,12 +118,18 @@ def test_data_from_csv_column_not_found(mock_csv_file): @pytest.fixture def sample_data(): + """Fixture to provide sample data for tests.""" return [ {"id": "123", "name": "Channel One"}, {"id": "456", "name": "Channel Two"} ] def test_data_to_csv_with_output_file_path(tmp_path, sample_data): + """Test to verify writing data to a CSV file with an output file path specified. 
+ + This test checks if the `data_to_csv` method correctly writes the sample data to + a CSV file when an output file path is provided. + """ output_file_path = tmp_path / "output.csv" result_path = Command.data_to_csv(sample_data, str(output_file_path)) @@ -107,6 +143,11 @@ def test_data_to_csv_with_output_file_path(tmp_path, sample_data): assert rows[0]["id"] == "123" and rows[1]["id"] == "456", "The IDs should match the sample data" def test_data_to_csv_without_output_file_path(sample_data): + """Test to verify writing data to a CSV format without an output file path specified. + + This test checks if the `data_to_csv` method correctly returns the CSV content + as a string when no output file path is provided. + """ csv_content = Command.data_to_csv(sample_data) assert "id,name" in csv_content @@ -114,6 +155,12 @@ def test_data_to_csv_without_output_file_path(sample_data): assert "456,Channel Two" in csv_content def test_data_to_csv_output(tmp_path): + """ + Test to verify the content of the output CSV file. + + This test checks if the `data_to_csv` method writes the expected content + to the output CSV file. + """ output_file_path = tmp_path / "output.csv" data = [ diff --git a/tests/commands/test_channel_id.py b/tests/commands/test_channel_id.py index 56035ee..04400ef 100644 --- a/tests/commands/test_channel_id.py +++ b/tests/commands/test_channel_id.py @@ -8,6 +8,8 @@ @pytest.fixture def csv_file(tmp_path): + """Fixture to create a temporary CSV file with a single YouTube channel URL.""" + csv_content = "channel_url\nhttps://www.youtube.com/@Turicas/featured\n" csv_file = tmp_path / "urls.csv" csv_file.write_text(csv_content) @@ -15,11 +17,21 @@ def csv_file(tmp_path): @pytest.fixture def youtube_api_mock(): + """Fixture to mock the YouTube API. + + This fixture mocks the `YouTube` class and its `channel_id_from_url` method + to return a channel ID based on the URL. + """ with patch("youtool.commands.channel_id.YouTube") as mock: mock.return_value.channel_id_from_url.side_effect = lambda url: f"channel-{url}" yield mock def test_channels_ids_csv_preparation(youtube_api_mock): + """Fixture to mock the YouTube API. + + This fixture mocks the `YouTube` class and its `channel_id_from_url` method + to return a channel ID based on the URL. + """ urls = ["https://www.youtube.com/@Turicas/featured", "https://www.youtube.com/c/PythonicCaf%C3%A9"] api_key = "test_api_key" id_column_name = "custom_id_column" @@ -40,16 +52,29 @@ def test_channels_ids_csv_preparation(youtube_api_mock): def test_resolve_urls_with_direct_urls(): - # Tests whether the function returns the directly given list of URLs. + """Test to verify resolving URLs when provided directly. + + This test checks if the `resolve_urls` method of the `ChannelId` class correctly + returns the given list of URLs when provided directly. + """ urls = ["https://www.youtube.com/@Turicas/featured"] result = ChannelId.resolve_urls(urls, None, None) assert result == urls def test_resolve_urls_with_file_path(csv_file): + """Test to verify resolving URLs from a CSV file. + + This test checks if the `resolve_urls` method of the `ChannelId` class correctly + reads URLs from a given CSV file. + """ result = ChannelId.resolve_urls(None, csv_file, "channel_url") assert result == ["https://www.youtube.com/@Turicas/featured"] def test_resolve_urls_raises_exception(): - # Tests whether the function throws an exception when neither urls nor urls_file_path are provided. + """Test to verify exception raising when no URLs are provided. 
+ + This test checks if the `resolve_urls` method of the `ChannelId` class raises an exception + when neither direct URLs nor a file path are provided. + """ with pytest.raises(Exception, match="Either 'username' or 'url' must be provided for the channel-id command"): ChannelId.resolve_urls(None, None, None) diff --git a/tests/test_cli.py b/tests/test_cli.py index 3a489ee..9165041 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -11,6 +11,11 @@ "command", COMMANDS ) def test_missing_api_key(monkeypatch: pytest.MonkeyPatch, command: Command): + """Test to verify behavior when the YouTube API key is missing. + + This test ensures that when the YouTube API key is not set, running any command + from the youtool CLI results in an appropriate error message and exit code. + """ monkeypatch.delenv('YOUTUBE_API_KEY', raising=False) cli_path = "youtool/cli.py" command = ["python", cli_path, command.name] From c5ad8fd7302c85af58972e850f297de202f2ff6a Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Tue, 25 Jun 2024 14:45:38 -0300 Subject: [PATCH 022/115] - Implement ChannelInfo class to fetch YouTube channel information from URLs, usernames, or CSV files containing them; - Add method to filter channel information based on specified columns; - Define method to handle the command logic, including reading input, fetching channel data, and saving to CSV; - Support for various input methods including direct URLs/usernames and file paths for CSV input; - Support for specifying output CSV file path and columns to include in the output. --- youtool/commands/channel_info.py | 120 +++++++++++++++++++++++++++++++ 1 file changed, 120 insertions(+) create mode 100644 youtool/commands/channel_info.py diff --git a/youtool/commands/channel_info.py b/youtool/commands/channel_info.py new file mode 100644 index 0000000..493ef82 --- /dev/null +++ b/youtool/commands/channel_info.py @@ -0,0 +1,120 @@ +import csv + +from typing import List, Dict, Optional, Self + +from youtool import YouTube + +from .base import Command + + +class ChannelInfo(Command): + """ + Get channel info from a list of IDs (or CSV filename with IDs inside), generate CSV output + (same schema for `channel` dicts) + """ + name = "channel-info" + arguments = [ + {"name": "--urls", "type": str, "help": "Channel URLs", "nargs": "*"}, + {"name": "--usernames", "type": str, "help": "Channel usernames", "nargs": "*"}, + {"name": "--ids", "type": str, "help": "Channel IDs", "nargs": "*"}, + {"name": "--urls-file-path", "type": str, "help": "Channel URLs CSV file path"}, + {"name": "--usernames-file-path", "type": str, "help": "Channel usernames CSV file path"}, + {"name": "--ids-file-path", "type": str, "help": "Channel IDs CSV file path"}, + {"name": "--output-file-path", "type": str, "help": "Output CSV file path"}, + {"name": "--url-column-name", "type": str, "help": "URL column name on CSV input files"}, + {"name": "--username-column-name", "type": str, "help": "Username column name on CSV input files"}, + {"name": "--id-column-name", "type": str, "help": "ID column name on CSV input files"}, + ] + + URL_COLUMN_NAME: str = "channel_url" + USERNAME_COLUMN_NAME: str = "channel_username" + ID_COLUMN_NAME: str = "channel_id" + INFO_COLUMNS: List[str] = [ + "id", "title", "description", "published_at", "view_count", "subscriber_count", "video_count" + ] + + @staticmethod + def filter_fields(channel_info: Dict, info_columns: Optional[List] = None): + """ + Filters the fields of a dictionary containing channel information based on + specified columns. 
+ + Args: + channel_info (Dict): A dictionary containing channel information. + info_columns (Optional[List], optional): A list specifying which fields + to include in the filtered output. If None, returns the entire + channel_info dictionary. Defaults to None. + + Returns: + Dict: A dictionary containing only the fields specified in info_columns + (if provided) or the entire channel_info dictionary if info_columns is None. + """ + return { + field: value for field, value in channel_info.items() if field in info_columns + } if info_columns else channel_info + + @classmethod + def execute(cls: Self, **kwargs) -> str: + """ + Execute the channel-info command to fetch YouTube channel information from URLs or usernames and save them to a CSV file. + + Args: + urls (list[str], optional): A list of YouTube channel URLs. If not provided, `urls_file_path` must be specified. + usernames (list[str], optional): A list of YouTube channel usernames. If not provided, `usernames_file_path` must be specified. + urls_file_path (str, optional): Path to a CSV file containing YouTube channel URLs. + usernames_file_path (str, optional): Path to a CSV file containing YouTube channel usernames. + output_file_path (str, optional): Path to the output CSV file where channel information will be saved. + api_key (str): The API key to authenticate with the YouTube Data API. + url_column_name (str, optional): The name of the column in the `urls_file_path` CSV file that contains the URLs. + Default is "channel_url". + username_column_name (str, optional): The name of the column in the `usernames_file_path` CSV file that contains the usernames. + Default is "channel_username". + info_columns (str, optional): Comma-separated list of columns to include in the output CSV. Default is the class attribute `INFO_COLUMNS`. + + Returns: + str: A message indicating the result of the command. If `output_file_path` is specified, the message will + include the path to the generated CSV file. Otherwise, it will return the result as a string. + + Raises: + Exception: If neither `urls`, `usernames`, `urls_file_path` nor `usernames_file_path` is provided. 
+ """ + urls = kwargs.get("urls") + usernames = kwargs.get("usernames") + urls_file_path = kwargs.get("urls_file_path") + usernames_file_path = kwargs.get("usernames_file_path") + output_file_path = kwargs.get("output_file_path") + api_key = kwargs.get("api_key") + + url_column_name = kwargs.get("url_column_name") + username_column_name = kwargs.get("username_column_name") + info_columns = kwargs.get("info_columns") + + info_columns = [ + column.strip() for column in info_columns.split(",") + ] if info_columns else ChannelInfo.INFO_COLUMNS + + if urls_file_path and not urls: + urls = ChannelInfo.data_from_file(urls_file_path, url_column_name) + if usernames_file_path and not usernames: + usernames = ChannelInfo.data_from_file(usernames_file_path, username_column_name) + + if not urls and not usernames: + raise Exception("Either 'urls' or 'usernames' must be provided for the channel-info command") + + youtube = YouTube([api_key], disable_ipv6=True) + + channels_ids = [ + youtube.channel_id_from_url(url) for url in (urls or []) if url + ] + [ + youtube.channel_id_from_username(username) for username in (usernames or []) if username + ] + channel_ids = [channel_id for channel_id in channels_ids if channel_id] + + return cls.data_to_csv( + data=[ + ChannelInfo.filter_fields( + channel_info, info_columns + ) for channel_info in (youtube.channels_infos(channel_ids) or []) + ], + output_file_path=output_file_path + ) \ No newline at end of file From e718d4a1acc2482395ede78a16353a5a32138def Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Tue, 25 Jun 2024 14:47:42 -0300 Subject: [PATCH 023/115] - Included ChannelInfo in the list of commands in COMMANDS. --- youtool/commands/__init__.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/youtool/commands/__init__.py b/youtool/commands/__init__.py index 89bbc09..1939a22 100644 --- a/youtool/commands/__init__.py +++ b/youtool/commands/__init__.py @@ -1,10 +1,12 @@ from .base import Command from .channel_id import ChannelId +from .channel_info import ChannelInfo COMMANDS = [ - ChannelId + ChannelId, + ChannelInfo ] __all__ = [ - "Command", "COMMANDS", "ChannelId", + COMMANDS, ChannelId, ChannelInfo ] From 7dc7b8d297122045191ccc7d94d90170f15518bf Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 27 Jun 2024 22:21:05 -0300 Subject: [PATCH 024/115] Add updates docstrings --- youtool/commands/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/youtool/commands/base.py b/youtool/commands/base.py index 077c826..275c282 100644 --- a/youtool/commands/base.py +++ b/youtool/commands/base.py @@ -44,7 +44,7 @@ def parse_arguments(cls, subparsers: argparse._SubParsersAction) -> None: parser.set_defaults(func=cls.execute) @classmethod - def execute(cls, **kwargs) -> str: # noqa: D417 + def execute(cls, **kwargs) -> str: """Executes the command. This method should be overridden by subclasses to define the command's behavior. 
From ed012e55368eed19b93f534bd85b72726b44248b Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 27 Jun 2024 22:22:26 -0300 Subject: [PATCH 025/115] Add updates docstrings --- youtool/commands/channel_id.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/youtool/commands/channel_id.py b/youtool/commands/channel_id.py index d42f311..c599982 100644 --- a/youtool/commands/channel_id.py +++ b/youtool/commands/channel_id.py @@ -21,7 +21,7 @@ class ChannelId(Command): CHANNEL_ID_COLUMN_NAME: str = "channel_id" @classmethod - def execute(cls, **kwargs) -> str: # noqa: D417 + def execute(cls, **kwargs) -> str: """Execute the channel-id command to fetch YouTube channel IDs from URLs and save them to a CSV file. This method retrieves YouTube channel IDs from a list of provided URLs or from a file containing URLs. From 9a5fe66e52486d3fa7840cfd1b7f98d4a79cf5ee Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 27 Jun 2024 22:51:39 -0300 Subject: [PATCH 026/115] - Add updates --- youtool/commands/channel_id.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/youtool/commands/channel_id.py b/youtool/commands/channel_id.py index c599982..d42f311 100644 --- a/youtool/commands/channel_id.py +++ b/youtool/commands/channel_id.py @@ -21,7 +21,7 @@ class ChannelId(Command): CHANNEL_ID_COLUMN_NAME: str = "channel_id" @classmethod - def execute(cls, **kwargs) -> str: + def execute(cls, **kwargs) -> str: # noqa: D417 """Execute the channel-id command to fetch YouTube channel IDs from URLs and save them to a CSV file. This method retrieves YouTube channel IDs from a list of provided URLs or from a file containing URLs. From 8ba47cf3a9a20f0544964d29db64607287e260fe Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 27 Jun 2024 22:53:51 -0300 Subject: [PATCH 027/115] - Add updates --- youtool/commands/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/youtool/commands/base.py b/youtool/commands/base.py index 275c282..077c826 100644 --- a/youtool/commands/base.py +++ b/youtool/commands/base.py @@ -44,7 +44,7 @@ def parse_arguments(cls, subparsers: argparse._SubParsersAction) -> None: parser.set_defaults(func=cls.execute) @classmethod - def execute(cls, **kwargs) -> str: + def execute(cls, **kwargs) -> str: # noqa: D417 """Executes the command. This method should be overridden by subclasses to define the command's behavior. 
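The `data_to_csv`/`data_from_csv` helpers exercised by the tests above boil down to `csv.DictWriter`/`csv.DictReader` over either a file or an in-memory buffer. A standalone sketch under that assumption (the function names `rows_to_csv` and `column_from_csv` are illustrative, not the youtool API; `lineterminator="\n"` is assumed to match the line endings the tests expect):

import csv
from io import StringIO


def rows_to_csv(rows, output_file_path=None):
    # Write to the given path when one is provided, otherwise return the CSV text itself.
    buffer = open(output_file_path, "w", newline="") if output_file_path else StringIO()
    try:
        writer = csv.DictWriter(buffer, fieldnames=list(rows[0].keys()) if rows else [], lineterminator="\n")
        writer.writeheader()
        writer.writerows(rows)
        return output_file_path if output_file_path else buffer.getvalue()
    finally:
        buffer.close()


def column_from_csv(csv_text, column):
    # Pull a single column out of CSV text, complaining if the column is missing.
    reader = csv.DictReader(StringIO(csv_text))
    if column not in (reader.fieldnames or []):
        raise ValueError(f"Column {column} not found")
    return [row[column].strip() for row in reader]


if __name__ == "__main__":
    csv_text = rows_to_csv([{"id": 1, "name": "Test1"}, {"id": 2, "name": "Test2"}])
    print(csv_text)                           # "id,name\n1,Test1\n2,Test2\n"
    print(column_from_csv(csv_text, "name"))  # ['Test1', 'Test2']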
From c08e4ecf7090c1e458ff2238578a8195e450d725 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 4 Jul 2024 13:57:55 -0300 Subject: [PATCH 028/115] - Add test for channel_info command; - Add update channel_info file; - fix test_base --- tests/commands/test_base.py | 16 ++++----- tests/commands/test_channel_info.py | 53 +++++++++++++++++++++++++++++ youtool/commands/channel_info.py | 6 ++-- 3 files changed, 64 insertions(+), 11 deletions(-) create mode 100644 tests/commands/test_channel_info.py diff --git a/tests/commands/test_base.py b/tests/commands/test_base.py index e9265e8..e15c787 100644 --- a/tests/commands/test_base.py +++ b/tests/commands/test_base.py @@ -2,8 +2,6 @@ import argparse import pytest -from io import StringIO -from datetime import datetime from pathlib import Path from unittest.mock import MagicMock, patch, mock_open from youtool.commands import Command @@ -90,7 +88,7 @@ def test_data_from_csv_valid(mock_csv_file): with patch('pathlib.Path.is_file', return_value=True): with patch('builtins.open', mock_open(read_data=mock_csv_file)): data_column_name = "URL" - file_path = Path("tests/commands/csv_valid.csv") + file_path = Path("tests/resources/csv_valid.csv") result = Command.data_from_csv(file_path, data_column_name) assert len(result) == 2 assert result[0] == "http://example.com" @@ -110,10 +108,10 @@ def test_data_from_csv_file_not_found(): def test_data_from_csv_column_not_found(mock_csv_file): with patch('pathlib.Path.is_file', return_value=True): with patch('builtins.open', mock_open(read_data=mock_csv_file)): - file_path = Path("tests/commands/csv_column_not_found.csv") + file_path = Path("tests/resources/csv_column_not_found.csv") with pytest.raises(Exception) as exc_info: Command.data_from_csv(file_path, "NonExistentColumn") - assert "Column NonExistentColumn not found on tests/commands/csv_column_not_found.csv" in str(exc_info.value), "Exception message should contain column not found error" + assert "Column NonExistentColumn not found on tests/resources/csv_column_not_found.csv" in str(exc_info.value) @pytest.fixture @@ -134,13 +132,13 @@ def test_data_to_csv_with_output_file_path(tmp_path, sample_data): result_path = Command.data_to_csv(sample_data, str(output_file_path)) - assert result_path == str(output_file_path), "The returned path should match the provided output file path" - assert output_file_path.exists(), "The output file should exist" + assert result_path == str(output_file_path) + assert output_file_path.exists() with output_file_path.open('r') as f: reader = csv.DictReader(f) rows = list(reader) - assert len(rows) == 2, "There should be two rows in the output CSV" - assert rows[0]["id"] == "123" and rows[1]["id"] == "456", "The IDs should match the sample data" + assert len(rows) == 2 + assert rows[0]["id"] == "123" and rows[1]["id"] == "456" def test_data_to_csv_without_output_file_path(sample_data): """Test to verify writing data to a CSV format without an output file path specified. 
diff --git a/tests/commands/test_channel_info.py b/tests/commands/test_channel_info.py new file mode 100644 index 0000000..06b3a66 --- /dev/null +++ b/tests/commands/test_channel_info.py @@ -0,0 +1,53 @@ +import pytest + +from unittest.mock import patch, Mock, call + +from youtool.commands.channel_info import ChannelInfo, YouTube + + +def test_filter_fields(): + channel_info = { + 'channel_id': '123456', + 'channel_name': 'Test Channel', + 'subscribers': 1000, + 'videos': 50, + 'category': 'Tech' + } + + info_columns = ['channel_id', 'channel_name', 'subscribers'] + filtered_info = ChannelInfo.filter_fields(channel_info, info_columns) + + expected_result = { + 'channel_id': '123456', + 'channel_name': 'Test Channel', + 'subscribers': 1000 + } + + assert filtered_info == expected_result, f"Expected {expected_result}, but got {filtered_info}" + + +def test_channel_ids_from_urls_and_usernames(mocker): + urls = ["https://www.youtube.com/@Turicas/featured", "https://www.youtube.com/c/PythonicCaf%C3%A9"] + usernames = ["Turicas", "PythonicCafe"] + + ids_from_urls_mock = "id_from_url" + ids_from_usernames_mock = "id_from_username" + youtube_mock = mocker.patch("youtool.commands.channel_info.YouTube") + + channel_id_from_url_mock = Mock(return_value=ids_from_urls_mock) + channel_id_from_username_mock = Mock(return_value=ids_from_usernames_mock) + channels_infos_mock = Mock(return_value=[]) + + youtube_mock.return_value.channel_id_from_url = channel_id_from_url_mock + youtube_mock.return_value.channel_id_from_username = channel_id_from_username_mock + youtube_mock.return_value.channels_infos = channels_infos_mock + + ChannelInfo.execute(urls=urls, usernames=usernames) + + channel_id_from_url_mock.assert_has_calls( + [call(url) for url in urls] + ) + channel_id_from_username_mock.assert_has_calls( + [call(username) for username in usernames] + ) + channels_infos_mock.assert_called_once_with([ids_from_urls_mock, ids_from_usernames_mock]) diff --git a/youtool/commands/channel_info.py b/youtool/commands/channel_info.py index 493ef82..fb0944e 100644 --- a/youtool/commands/channel_info.py +++ b/youtool/commands/channel_info.py @@ -108,7 +108,9 @@ def execute(cls: Self, **kwargs) -> str: ] + [ youtube.channel_id_from_username(username) for username in (usernames or []) if username ] - channel_ids = [channel_id for channel_id in channels_ids if channel_id] + channel_ids = list( + set([channel_id for channel_id in channels_ids if channel_id]) + ) return cls.data_to_csv( data=[ @@ -117,4 +119,4 @@ def execute(cls: Self, **kwargs) -> str: ) for channel_info in (youtube.channels_infos(channel_ids) or []) ], output_file_path=output_file_path - ) \ No newline at end of file + ) From a5bb13d54c0aa8474126fe923c026bb6ab268974 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 15:34:28 -0300 Subject: [PATCH 029/115] add docstrings --- tests/commands/test_channel_info.py | 16 ++++++++++++---- youtool/commands/channel_info.py | 14 +++++++------- 2 files changed, 19 insertions(+), 11 deletions(-) diff --git a/tests/commands/test_channel_info.py b/tests/commands/test_channel_info.py index 06b3a66..5e6ef33 100644 --- a/tests/commands/test_channel_info.py +++ b/tests/commands/test_channel_info.py @@ -1,11 +1,14 @@ -import pytest +from unittest.mock import Mock, call -from unittest.mock import patch, Mock, call - -from youtool.commands.channel_info import ChannelInfo, YouTube +from youtool.commands.channel_info import ChannelInfo def test_filter_fields(): + """Test to verify the filtering of channel 
information fields. + + This test checks if the `filter_fields` method of the `ChannelInfo` class correctly + filters out unwanted fields from the channel information dictionary based on the provided columns. + """ channel_info = { 'channel_id': '123456', 'channel_name': 'Test Channel', @@ -27,6 +30,11 @@ def test_filter_fields(): def test_channel_ids_from_urls_and_usernames(mocker): + """Test to verify fetching channel IDs from both URLs and usernames. + + This test checks if the `execute` method of the `ChannelInfo` class correctly fetches channel IDs + from a list of URLs and usernames, and then calls the `channels_infos` method with these IDs. + """ urls = ["https://www.youtube.com/@Turicas/featured", "https://www.youtube.com/c/PythonicCaf%C3%A9"] usernames = ["Turicas", "PythonicCafe"] diff --git a/youtool/commands/channel_info.py b/youtool/commands/channel_info.py index fb0944e..09103af 100644 --- a/youtool/commands/channel_info.py +++ b/youtool/commands/channel_info.py @@ -8,8 +8,7 @@ class ChannelInfo(Command): - """ - Get channel info from a list of IDs (or CSV filename with IDs inside), generate CSV output + """Get channel info from a list of IDs (or CSV filename with IDs inside), generate CSV output (same schema for `channel` dicts) """ name = "channel-info" @@ -35,8 +34,7 @@ class ChannelInfo(Command): @staticmethod def filter_fields(channel_info: Dict, info_columns: Optional[List] = None): - """ - Filters the fields of a dictionary containing channel information based on + """Filters the fields of a dictionary containing channel information based on specified columns. Args: @@ -55,8 +53,8 @@ def filter_fields(channel_info: Dict, info_columns: Optional[List] = None): @classmethod def execute(cls: Self, **kwargs) -> str: - """ - Execute the channel-info command to fetch YouTube channel information from URLs or usernames and save them to a CSV file. + """Execute the channel-info command to fetch YouTube channel information from URLs or + usernames and save them to a CSV file. Args: urls (list[str], optional): A list of YouTube channel URLs. If not provided, `urls_file_path` must be specified. @@ -69,7 +67,8 @@ def execute(cls: Self, **kwargs) -> str: Default is "channel_url". username_column_name (str, optional): The name of the column in the `usernames_file_path` CSV file that contains the usernames. Default is "channel_username". - info_columns (str, optional): Comma-separated list of columns to include in the output CSV. Default is the class attribute `INFO_COLUMNS`. + info_columns (str, optional): Comma-separated list of columns to include in the output CSV. + Default is the class attribute `INFO_COLUMNS`. Returns: str: A message indicating the result of the command. If `output_file_path` is specified, the message will @@ -78,6 +77,7 @@ def execute(cls: Self, **kwargs) -> str: Raises: Exception: If neither `urls`, `usernames`, `urls_file_path` nor `usernames_file_path` is provided. 
""" + urls = kwargs.get("urls") usernames = kwargs.get("usernames") urls_file_path = kwargs.get("urls_file_path") From 923170e226881b2e4dd570fb74e9d3023f5cb344 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 23:16:33 -0300 Subject: [PATCH 030/115] fix --- tests/commands/test_channel_info.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/commands/test_channel_info.py b/tests/commands/test_channel_info.py index 5e6ef33..67f5c3d 100644 --- a/tests/commands/test_channel_info.py +++ b/tests/commands/test_channel_info.py @@ -1,3 +1,5 @@ +import pytest + from unittest.mock import Mock, call from youtool.commands.channel_info import ChannelInfo From d77a36bd8aab208f0039365a0ad16307d26c161e Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Sun, 30 Jun 2024 19:29:28 -0300 Subject: [PATCH 031/115] Add new command in list --- youtool/commands/__init__.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/youtool/commands/__init__.py b/youtool/commands/__init__.py index 1939a22..51afcfc 100644 --- a/youtool/commands/__init__.py +++ b/youtool/commands/__init__.py @@ -1,12 +1,14 @@ from .base import Command from .channel_id import ChannelId from .channel_info import ChannelInfo +from .video_info import VideoInfo COMMANDS = [ ChannelId, - ChannelInfo + ChannelInfo, + VideoInfo ] __all__ = [ - COMMANDS, ChannelId, ChannelInfo + COMMANDS, ChannelId, ChannelInfo, VideoInfo ] From 734e98129d74479679bb863ebb7b304efed579ce Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Sun, 30 Jun 2024 19:32:20 -0300 Subject: [PATCH 032/115] - Implement CSV input processing for video IDs and URLs in VideoInfo class; --- youtool/commands/video_info.py | 106 +++++++++++++++++++++++++++++++++ 1 file changed, 106 insertions(+) create mode 100644 youtool/commands/video_info.py diff --git a/youtool/commands/video_info.py b/youtool/commands/video_info.py new file mode 100644 index 0000000..8fd8eed --- /dev/null +++ b/youtool/commands/video_info.py @@ -0,0 +1,106 @@ +import csv + +from typing import List, Dict, Optional, Self + +from youtool import YouTube + +from .base import Command + + +class VideoInfo(Command): + """Get video info from a list of IDs or URLs (or CSV filename with URLs/IDs inside), generate CSV output (same schema for video dicts)") + """ + name = "video-info" + arguments = [ + {"name": "--ids", "type": str, "help": "Video IDs", "nargs": "*"}, + {"name": "--urls", "type": str, "help": "Video URLs", "nargs": "*"}, + {"name": "--input-file-path", "type": str, "help": "Input CSV file path with URLs/IDs"}, + {"name": "--output-file-path", "type": str, "help": "Output CSV file path"} + ] + + ID_COLUMN_NAME: str = "video_id" + URL_COLUMN_NAME: str = "video_url" + INFO_COLUMNS: List[str] = [ + "id", "title", "description", "published_at", "view_count", "like_count", "comment_count" + ] + + @staticmethod + def filter_fields(video_info: Dict, info_columns: Optional[List] = None) -> Dict: + """Filters the fields of a dictionary containing video information based on specified columns. + + Args: + video_info (Dict): A dictionary containing video information. + info_columns (Optional[List], optional): A list specifying which fields to include in the filtered output. + If None, returns the entire video_info dictionary. Defaults to None. + + Returns: + A dictionary containing only the fields specified in info_columns (if provided) + or the entire video_info dictionary if info_columns is None. 
+ """ + return { + field: value for field, value in video_info.items() if field in info_columns + } if info_columns else video_info + + @classmethod + def execute(cls: Self, **kwargs) -> str: + """ + Execute the video-info command to fetch YouTube video information from IDs or URLs and save them to a CSV file. + + Args: + ids (list[str], optional): A list of YouTube video IDs. If not provided, input_file_path must be specified. + urls (list[str], optional): A list of YouTube video URLs. If not provided, input_file_path must be specified. + urls_file_path (str, optional): Path to a CSV file containing YouTube channel URLs. + ids_file_path (str, optional): Path to a CSV file containing YouTube channel IDs. + input_file_path (str, optional): Path to a CSV file containing YouTube video URLs or IDs. + output_file_path (str, optional): Path to the output CSV file where video information will be saved. + api_key (str): The API key to authenticate with the YouTube Data API. + url_column_name (str, optional): The name of the column in the input_file_path CSV file that contains the URLs. + Default is "video_url". + id_column_name (str, optional): The name of the column in the input_file_path CSV file that contains the IDs. + Default is "video_id". + info_columns (str, optional): Comma-separated list of columns to include in the output CSV. Default is the class attribute INFO_COLUMNS. + + Returns: + A message indicating the result of the command. If output_file_path is specified, + the message will include the path to the generated CSV file. + Otherwise, it will return the result as a string. + + Raises: + Exception: If neither ids, urls, nor input_file_path is provided. + """ + ids = kwargs.get("ids") + urls = kwargs.get("urls") + input_file_path = kwargs.get("input_file_path") + output_file_path = kwargs.get("output_file_path") + api_key = kwargs.get("api_key") + + info_columns = kwargs.get("info_columns") + + info_columns = [ + column.strip() for column in info_columns.split(",") + ] if info_columns else VideoInfo.INFO_COLUMNS + + if input_file_path: + with open(input_file_path, mode='r') as infile: + reader = csv.DictReader(infile) + for row in reader: + if cls.ID_COLUMN_NAME in row: + ids.append(row[cls.ID_COLUMN_NAME]) + elif cls.URL_COLUMN_NAME in row: + urls.append(row[cls.URL_COLUMN_NAME]) + + if not ids and not urls: + raise Exception("Either 'ids' or 'urls' must be provided for the video-info command") + + youtube = YouTube([api_key], disable_ipv6=True) + + video_infos = list(youtube.videos_infos(ids)) + + return cls.data_to_csv( + data=[ + VideoInfo.filter_fields( + video_info, info_columns + ) for video_info in video_infos + ], + output_file_path=output_file_path + ) From e9000ca074771b4c57c9ec5c0651f71afc44215b Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 27 Jun 2024 22:21:05 -0300 Subject: [PATCH 033/115] Add updates docstrings --- youtool/commands/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/youtool/commands/base.py b/youtool/commands/base.py index 077c826..275c282 100644 --- a/youtool/commands/base.py +++ b/youtool/commands/base.py @@ -44,7 +44,7 @@ def parse_arguments(cls, subparsers: argparse._SubParsersAction) -> None: parser.set_defaults(func=cls.execute) @classmethod - def execute(cls, **kwargs) -> str: # noqa: D417 + def execute(cls, **kwargs) -> str: """Executes the command. This method should be overridden by subclasses to define the command's behavior. 
From d4327d1851c36d5f54b6ed574f4dacb0e7591e43 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 27 Jun 2024 22:22:26 -0300 Subject: [PATCH 034/115] Add updates docstrings --- youtool/commands/channel_id.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/youtool/commands/channel_id.py b/youtool/commands/channel_id.py index d42f311..c599982 100644 --- a/youtool/commands/channel_id.py +++ b/youtool/commands/channel_id.py @@ -21,7 +21,7 @@ class ChannelId(Command): CHANNEL_ID_COLUMN_NAME: str = "channel_id" @classmethod - def execute(cls, **kwargs) -> str: # noqa: D417 + def execute(cls, **kwargs) -> str: """Execute the channel-id command to fetch YouTube channel IDs from URLs and save them to a CSV file. This method retrieves YouTube channel IDs from a list of provided URLs or from a file containing URLs. From c4134e0dbdabd0adb307da566e8c93b8a0a1510c Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 27 Jun 2024 22:51:39 -0300 Subject: [PATCH 035/115] - Add updates --- youtool/commands/channel_id.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/youtool/commands/channel_id.py b/youtool/commands/channel_id.py index c599982..d42f311 100644 --- a/youtool/commands/channel_id.py +++ b/youtool/commands/channel_id.py @@ -21,7 +21,7 @@ class ChannelId(Command): CHANNEL_ID_COLUMN_NAME: str = "channel_id" @classmethod - def execute(cls, **kwargs) -> str: + def execute(cls, **kwargs) -> str: # noqa: D417 """Execute the channel-id command to fetch YouTube channel IDs from URLs and save them to a CSV file. This method retrieves YouTube channel IDs from a list of provided URLs or from a file containing URLs. From 068340369caf920ef6d2ba074193613cca92c2ce Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 27 Jun 2024 22:53:51 -0300 Subject: [PATCH 036/115] - Add updates --- youtool/commands/base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/youtool/commands/base.py b/youtool/commands/base.py index 275c282..077c826 100644 --- a/youtool/commands/base.py +++ b/youtool/commands/base.py @@ -44,7 +44,7 @@ def parse_arguments(cls, subparsers: argparse._SubParsersAction) -> None: parser.set_defaults(func=cls.execute) @classmethod - def execute(cls, **kwargs) -> str: + def execute(cls, **kwargs) -> str: # noqa: D417 """Executes the command. This method should be overridden by subclasses to define the command's behavior. 
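Both `channel-info` and `video-info` above reduce each API record to a configurable column set before writing CSV, driven by a comma-separated `--info-columns` value. A standalone sketch of that filtering step (the sample record and column names are illustrative only):

from typing import Dict, List, Optional


def parse_info_columns(raw: Optional[str], default: List[str]) -> List[str]:
    # "--info-columns id,title" -> ["id", "title"]; otherwise fall back to the default schema.
    return [column.strip() for column in raw.split(",")] if raw else list(default)


def filter_fields(info: Dict, info_columns: Optional[List] = None) -> Dict:
    # Keep only the requested columns; with no column list, pass the record through unchanged.
    return {field: value for field, value in info.items() if field in info_columns} if info_columns else info


if __name__ == "__main__":
    video = {"id": "tmrhPou85HQ", "title": "Title 1", "view_count": 100, "like_count": 10}
    columns = parse_info_columns("id,title", default=["id", "title", "view_count"])
    print(filter_fields(video, columns))  # {'id': 'tmrhPou85HQ', 'title': 'Title 1'}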
From 60bd1447d79332b595346cedc8a09a5e46bf34d0 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Tue, 2 Jul 2024 12:40:16 -0300 Subject: [PATCH 037/115] Add update --- tests/commands/test_base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/commands/test_base.py b/tests/commands/test_base.py index e15c787..90fd955 100644 --- a/tests/commands/test_base.py +++ b/tests/commands/test_base.py @@ -111,7 +111,7 @@ def test_data_from_csv_column_not_found(mock_csv_file): file_path = Path("tests/resources/csv_column_not_found.csv") with pytest.raises(Exception) as exc_info: Command.data_from_csv(file_path, "NonExistentColumn") - assert "Column NonExistentColumn not found on tests/resources/csv_column_not_found.csv" in str(exc_info.value) + assert f"Column NonExistentColumn not found on {file_path}" in str(exc_info.value) @pytest.fixture @@ -170,3 +170,4 @@ def test_data_to_csv_output(tmp_path): result = Command.data_to_csv(data, str(output_file_path)) assert Path(output_file_path).is_file() assert expected_output == Path(output_file_path).read_text() + assert str(output_file_path) == result From 916d6331256e4b665a924ec48dd9e436f3e6f94c Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Tue, 2 Jul 2024 13:11:36 -0300 Subject: [PATCH 038/115] add config optional argmuments --- youtool/cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/youtool/cli.py b/youtool/cli.py index 961d2e6..7a430e5 100644 --- a/youtool/cli.py +++ b/youtool/cli.py @@ -21,7 +21,7 @@ def main(): """ parser = argparse.ArgumentParser(description="CLI Tool for managing YouTube videos add playlists") parser.add_argument("--api-key", type=str, help="YouTube API Key", dest="api_key") - parser.add_argument("--debug", type=bool, help="Debug mode", dest="debug") + parser.add_argument("--debug", default=False, action="store_true", help="Debug mode", dest="debug") subparsers = parser.add_subparsers(required=True, dest="command", title="Command", help="Command to be executed") From 65f44fba5cce5047ff06fe4f09663f05ca53295f Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Tue, 2 Jul 2024 13:14:23 -0300 Subject: [PATCH 039/115] add not implemented error --- youtool/commands/video_info.py | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/youtool/commands/video_info.py b/youtool/commands/video_info.py index 8fd8eed..b375111 100644 --- a/youtool/commands/video_info.py +++ b/youtool/commands/video_info.py @@ -93,14 +93,21 @@ def execute(cls: Self, **kwargs) -> str: raise Exception("Either 'ids' or 'urls' must be provided for the video-info command") youtube = YouTube([api_key], disable_ipv6=True) + + videos_infos = [] + + if ids: + videos_infos += list(youtube.videos_infos(ids)) + if urls: + # TODO: add get videos_infos using urls to youtool + raise NotImplementedError("videos_infos by url not implemented yet") - video_infos = list(youtube.videos_infos(ids)) return cls.data_to_csv( data=[ VideoInfo.filter_fields( video_info, info_columns - ) for video_info in video_infos + ) for video_info in videos_infos ], output_file_path=output_file_path ) From eddfb96c85eacece31cbed338d281bdfa84e5e0a Mon Sep 17 00:00:00 2001 From: Ana Paula Sales Date: Wed, 3 Jul 2024 19:56:25 -0300 Subject: [PATCH 040/115] add video_id_from_url static method --- youtool/commands/base.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/youtool/commands/base.py b/youtool/commands/base.py index 077c826..414b813 100644 --- a/youtool/commands/base.py +++ b/youtool/commands/base.py @@ -5,6 +5,7 @@ 
from io import StringIO from pathlib import Path from datetime import datetime +from urllib.parse import urlparse, parse_qsl class Command: @@ -17,6 +18,12 @@ class Command: name: str arguments: List[Dict[str, Any]] + @staticmethod + def video_id_from_url(video_url: str) -> Optional[str]: + parsed_url = urlparse(video_url) + parsed_url_query = dict(parse_qsl(parsed_url.query)) + return parsed_url_query.get("v") + @classmethod def generate_parser(cls, subparsers: argparse._SubParsersAction): """Creates a parser for the command and adds it to the subparsers. From b344a72593ebd83b9770cc7f416cb270829f9500 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Wed, 3 Jul 2024 20:15:54 -0300 Subject: [PATCH 041/115] add video-info from url case --- youtool/commands/video_info.py | 15 ++++++--------- 1 file changed, 6 insertions(+), 9 deletions(-) diff --git a/youtool/commands/video_info.py b/youtool/commands/video_info.py index b375111..35ca6a1 100644 --- a/youtool/commands/video_info.py +++ b/youtool/commands/video_info.py @@ -68,8 +68,8 @@ def execute(cls: Self, **kwargs) -> str: Raises: Exception: If neither ids, urls, nor input_file_path is provided. """ - ids = kwargs.get("ids") - urls = kwargs.get("urls") + ids = kwargs.get("ids", []) + urls = kwargs.get("urls", []) input_file_path = kwargs.get("input_file_path") output_file_path = kwargs.get("output_file_path") api_key = kwargs.get("api_key") @@ -93,16 +93,13 @@ def execute(cls: Self, **kwargs) -> str: raise Exception("Either 'ids' or 'urls' must be provided for the video-info command") youtube = YouTube([api_key], disable_ipv6=True) - - videos_infos = [] - if ids: - videos_infos += list(youtube.videos_infos(ids)) if urls: - # TODO: add get videos_infos using urls to youtool - raise NotImplementedError("videos_infos by url not implemented yet") - + ids += [cls.video_id_from_url(url) for url in urls] + # Remove duplicated + ids = list(set(ids)) + videos_infos = list(youtube.videos_infos([_id for _id in ids if _id])) return cls.data_to_csv( data=[ VideoInfo.filter_fields( From 64252d73ab9cda05abacbf7f346a3c66f493fe25 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 4 Jul 2024 13:57:55 -0300 Subject: [PATCH 042/115] - Add test for channel_info command; - Add update channel_info file; - fix test_base --- tests/commands/test_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/commands/test_base.py b/tests/commands/test_base.py index 90fd955..c87ddea 100644 --- a/tests/commands/test_base.py +++ b/tests/commands/test_base.py @@ -111,7 +111,7 @@ def test_data_from_csv_column_not_found(mock_csv_file): file_path = Path("tests/resources/csv_column_not_found.csv") with pytest.raises(Exception) as exc_info: Command.data_from_csv(file_path, "NonExistentColumn") - assert f"Column NonExistentColumn not found on {file_path}" in str(exc_info.value) + assert "Column NonExistentColumn not found on tests/resources/csv_column_not_found.csv" in str(exc_info.value) @pytest.fixture From 801e5f34c023f16a6c0eae6ebab3d2757bc115fa Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 4 Jul 2024 16:31:54 -0300 Subject: [PATCH 043/115] Add test for video_info command; - Add improvements to base file; - Add changes to test_base file --- tests/commands/test_base.py | 20 ++++++++ tests/commands/test_video_info.py | 80 +++++++++++++++++++++++++++++++ youtool/commands/base.py | 18 +++++++ youtool/commands/video_info.py | 17 ------- 4 files changed, 118 insertions(+), 17 deletions(-) create mode 100644 tests/commands/test_video_info.py diff 
--git a/tests/commands/test_base.py b/tests/commands/test_base.py index c87ddea..cc26e43 100644 --- a/tests/commands/test_base.py +++ b/tests/commands/test_base.py @@ -171,3 +171,23 @@ def test_data_to_csv_output(tmp_path): assert Path(output_file_path).is_file() assert expected_output == Path(output_file_path).read_text() assert str(output_file_path) == result + +def test_filter_fields(): + channel_info = { + 'channel_id': '123456', + 'channel_name': 'Test Channel', + 'subscribers': 1000, + 'videos': 50, + 'category': 'Tech' + } + + info_columns = ['channel_id', 'channel_name', 'subscribers'] + filtered_info = Command.filter_fields(channel_info, info_columns) + + expected_result = { + 'channel_id': '123456', + 'channel_name': 'Test Channel', + 'subscribers': 1000 + } + + assert filtered_info == expected_result, f"Expected {expected_result}, but got {filtered_info}" \ No newline at end of file diff --git a/tests/commands/test_video_info.py b/tests/commands/test_video_info.py new file mode 100644 index 0000000..49e3168 --- /dev/null +++ b/tests/commands/test_video_info.py @@ -0,0 +1,80 @@ +import csv +import pytest + +from unittest.mock import Mock +from pathlib import Path +from youtool.commands import VideoInfo + + +@pytest.fixture +def youtube_mock(mocker, mock_video_info): + mock = mocker.patch("youtool.commands.video_info.YouTube") + mock_instance = mock.return_value + mock_instance.videos_infos = Mock(return_value=mock_video_info) + return mock_instance + +@pytest.fixture +def mock_video_info(): + return [ + {"id": "tmrhPou85HQ", "title": "Title 1", "description": "Description 1", "published_at": "2021-01-01", "view_count": 100, "like_count": 10, "comment_count": 5}, + {"id": "qoI_x9fylaw", "title": "Title 2", "description": "Description 2", "published_at": "2021-02-01", "view_count": 200, "like_count": 20, "comment_count": 10} + ] + +def test_execute_with_ids_and_urls(youtube_mock, mocker, tmp_path, mock_video_info): + ids = ["tmrhPou85HQ", "qoI_x9fylaw"] + urls = ["https://www.youtube.com/watch?v=tmrhPou85HQ&ab_channel=Turicas", "https://www.youtube.com/watch?v=qoI_x9fylaw&ab_channel=PythonicCaf%C3%A9"] + output_file_path = tmp_path / "output.csv" + + VideoInfo.execute(ids=ids, urls=urls, output_file_path=str(output_file_path), api_key="test_api_key") + + assert Path(output_file_path).is_file() + with open(output_file_path, 'r') as f: + reader = csv.DictReader(f) + csv_data = list(reader) + + assert csv_data[0]["id"] == "tmrhPou85HQ" + assert csv_data[1]["id"] == "qoI_x9fylaw" + +def test_execute_missing_arguments(): + with pytest.raises(Exception) as exc_info: + VideoInfo.execute(api_key="test_api_key") + + assert str(exc_info.value) == "Either 'ids' or 'urls' must be provided for the video-info command" + +def test_execute_with_input_file_path(youtube_mock, mocker, tmp_path, mock_video_info): + input_csv_content = """video_id,video_url + tmrhPou85HQ,https://www.youtube.com/watch?v=tmrhPou85HQ&ab_channel=Turicas + qoI_x9fylaw,https://www.youtube.com/watch?v=qoI_x9fylaw&ab_channel=PythonicCaf%C3%A9 + """ + input_file_path = tmp_path / "input.csv" + output_file_path = tmp_path / "output.csv" + + with open(input_file_path, 'w') as f: + f.write(input_csv_content) + + VideoInfo.execute(input_file_path=str(input_file_path), output_file_path=str(output_file_path), api_key="test_api_key") + + assert Path(output_file_path).is_file() + with open(output_file_path, 'r') as f: + reader = csv.DictReader(f) + csv_data = list(reader) + + assert csv_data[0]["id"] == "tmrhPou85HQ" + assert 
csv_data[1]["id"] == "qoI_x9fylaw" + + +def test_execute_with_info_columns(youtube_mock, mocker, tmp_path, mock_video_info): + ids = ["tmrhPou85HQ", "qoI_x9fylaw"] + output_file_path = tmp_path / "output.csv" + + VideoInfo.execute(ids=ids, output_file_path=str(output_file_path), api_key="test_api_key", info_columns="id,title") + + assert Path(output_file_path).is_file() + with open(output_file_path, 'r') as f: + reader = csv.DictReader(f) + csv_data = list(reader) + + assert csv_data[0]["id"] == "tmrhPou85HQ" + assert csv_data[0]["title"] == "Title 1" + assert csv_data[1]["id"] == "qoI_x9fylaw" + assert csv_data[1]["title"] == "Title 2" diff --git a/youtool/commands/base.py b/youtool/commands/base.py index 414b813..a2ac387 100644 --- a/youtool/commands/base.py +++ b/youtool/commands/base.py @@ -50,6 +50,24 @@ def parse_arguments(cls, subparsers: argparse._SubParsersAction) -> None: parser.add_argument(argument_name, **argument_copy) parser.set_defaults(func=cls.execute) + @staticmethod + def filter_fields(video_info: Dict, info_columns: Optional[List] = None) -> Dict: + """Filters the fields of a dictionary containing video information based on specified columns. + + Args: + video_info (Dict): A dictionary containing video information. + info_columns (Optional[List], optional): A list specifying which fields to include in the filtered output. + If None, returns the entire video_info dictionary. Defaults to None. + + Returns: + A dictionary containing only the fields specified in info_columns (if provided) + or the entire video_info dictionary if info_columns is None. + """ + return { + field: value for field, value in video_info.items() if field in info_columns + } if info_columns else video_info + + @classmethod def execute(cls, **kwargs) -> str: # noqa: D417 """Executes the command. diff --git a/youtool/commands/video_info.py b/youtool/commands/video_info.py index 35ca6a1..f5f344b 100644 --- a/youtool/commands/video_info.py +++ b/youtool/commands/video_info.py @@ -24,23 +24,6 @@ class VideoInfo(Command): "id", "title", "description", "published_at", "view_count", "like_count", "comment_count" ] - @staticmethod - def filter_fields(video_info: Dict, info_columns: Optional[List] = None) -> Dict: - """Filters the fields of a dictionary containing video information based on specified columns. - - Args: - video_info (Dict): A dictionary containing video information. - info_columns (Optional[List], optional): A list specifying which fields to include in the filtered output. - If None, returns the entire video_info dictionary. Defaults to None. - - Returns: - A dictionary containing only the fields specified in info_columns (if provided) - or the entire video_info dictionary if info_columns is None. 
- """ - return { - field: value for field, value in video_info.items() if field in info_columns - } if info_columns else video_info - @classmethod def execute(cls: Self, **kwargs) -> str: """ From 9572aedffd0f12e9b5a4dc984ad1fbb21bee913c Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 4 Jul 2024 16:48:57 -0300 Subject: [PATCH 044/115] fix --- tests/commands/test_video_info.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/commands/test_video_info.py b/tests/commands/test_video_info.py index 49e3168..ed01c35 100644 --- a/tests/commands/test_video_info.py +++ b/tests/commands/test_video_info.py @@ -15,10 +15,10 @@ def youtube_mock(mocker, mock_video_info): @pytest.fixture def mock_video_info(): - return [ - {"id": "tmrhPou85HQ", "title": "Title 1", "description": "Description 1", "published_at": "2021-01-01", "view_count": 100, "like_count": 10, "comment_count": 5}, - {"id": "qoI_x9fylaw", "title": "Title 2", "description": "Description 2", "published_at": "2021-02-01", "view_count": 200, "like_count": 20, "comment_count": 10} - ] + return [ + {"id": "tmrhPou85HQ", "title": "Title 1", "description": "Description 1", "published_at": "2021-01-01", "view_count": 100, "like_count": 10, "comment_count": 5}, + {"id": "qoI_x9fylaw", "title": "Title 2", "description": "Description 2", "published_at": "2021-02-01", "view_count": 200, "like_count": 20, "comment_count": 10} + ] def test_execute_with_ids_and_urls(youtube_mock, mocker, tmp_path, mock_video_info): ids = ["tmrhPou85HQ", "qoI_x9fylaw"] From 5de983d5afb357640d79c8ea1c071df52cacdccd Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 15:48:36 -0300 Subject: [PATCH 045/115] add docstrings --- tests/commands/test_video_info.py | 26 ++++++++++++++++++++++++++ youtool/commands/video_info.py | 13 ++++++------- 2 files changed, 32 insertions(+), 7 deletions(-) diff --git a/tests/commands/test_video_info.py b/tests/commands/test_video_info.py index ed01c35..f4da48f 100644 --- a/tests/commands/test_video_info.py +++ b/tests/commands/test_video_info.py @@ -8,6 +8,7 @@ @pytest.fixture def youtube_mock(mocker, mock_video_info): + """Fixture to mock the YouTube instance and its videos_infos method.""" mock = mocker.patch("youtool.commands.video_info.YouTube") mock_instance = mock.return_value mock_instance.videos_infos = Mock(return_value=mock_video_info) @@ -15,12 +16,18 @@ def youtube_mock(mocker, mock_video_info): @pytest.fixture def mock_video_info(): + """Fixture to return mock video information.""" return [ {"id": "tmrhPou85HQ", "title": "Title 1", "description": "Description 1", "published_at": "2021-01-01", "view_count": 100, "like_count": 10, "comment_count": 5}, {"id": "qoI_x9fylaw", "title": "Title 2", "description": "Description 2", "published_at": "2021-02-01", "view_count": 200, "like_count": 20, "comment_count": 10} ] def test_execute_with_ids_and_urls(youtube_mock, mocker, tmp_path, mock_video_info): + """Test the execute method with provided video IDs and URLs. + + This test verifies that the execute method can handle both video IDs and URLs, + and correctly writes the video information to the output CSV file. 
+ """ ids = ["tmrhPou85HQ", "qoI_x9fylaw"] urls = ["https://www.youtube.com/watch?v=tmrhPou85HQ&ab_channel=Turicas", "https://www.youtube.com/watch?v=qoI_x9fylaw&ab_channel=PythonicCaf%C3%A9"] output_file_path = tmp_path / "output.csv" @@ -36,12 +43,25 @@ def test_execute_with_ids_and_urls(youtube_mock, mocker, tmp_path, mock_video_in assert csv_data[1]["id"] == "qoI_x9fylaw" def test_execute_missing_arguments(): + """Test the execute method raises an exception when missing required arguments. + + This test verifies that the execute method raises an exception if neither + video IDs nor URLs are provided. + + Raises: + Exception: If neither 'ids' nor 'urls' is provided. + """ with pytest.raises(Exception) as exc_info: VideoInfo.execute(api_key="test_api_key") assert str(exc_info.value) == "Either 'ids' or 'urls' must be provided for the video-info command" def test_execute_with_input_file_path(youtube_mock, mocker, tmp_path, mock_video_info): + """Test the execute method with an input CSV file containing video URLs and IDs. + + This test verifies that the execute method can read video URLs and IDs from + an input CSV file and correctly writes the video information to the output CSV file. + """ input_csv_content = """video_id,video_url tmrhPou85HQ,https://www.youtube.com/watch?v=tmrhPou85HQ&ab_channel=Turicas qoI_x9fylaw,https://www.youtube.com/watch?v=qoI_x9fylaw&ab_channel=PythonicCaf%C3%A9 @@ -64,6 +84,12 @@ def test_execute_with_input_file_path(youtube_mock, mocker, tmp_path, mock_video def test_execute_with_info_columns(youtube_mock, mocker, tmp_path, mock_video_info): + """Test the execute method with specified info columns. + + This test verifies that the execute method can filter the video information + based on specified columns and correctly writes the filtered information + to the output CSV file. + """ ids = ["tmrhPou85HQ", "qoI_x9fylaw"] output_file_path = tmp_path / "output.csv" diff --git a/youtool/commands/video_info.py b/youtool/commands/video_info.py index f5f344b..bfa6534 100644 --- a/youtool/commands/video_info.py +++ b/youtool/commands/video_info.py @@ -32,25 +32,24 @@ def execute(cls: Self, **kwargs) -> str: Args: ids (list[str], optional): A list of YouTube video IDs. If not provided, input_file_path must be specified. urls (list[str], optional): A list of YouTube video URLs. If not provided, input_file_path must be specified. - urls_file_path (str, optional): Path to a CSV file containing YouTube channel URLs. - ids_file_path (str, optional): Path to a CSV file containing YouTube channel IDs. input_file_path (str, optional): Path to a CSV file containing YouTube video URLs or IDs. output_file_path (str, optional): Path to the output CSV file where video information will be saved. api_key (str): The API key to authenticate with the YouTube Data API. url_column_name (str, optional): The name of the column in the input_file_path CSV file that contains the URLs. - Default is "video_url". + Default is "video_url". id_column_name (str, optional): The name of the column in the input_file_path CSV file that contains the IDs. Default is "video_id". - info_columns (str, optional): Comma-separated list of columns to include in the output CSV. Default is the class attribute INFO_COLUMNS. + info_columns (str, optional): Comma-separated list of columns to include in the output CSV. + Default is the class attribute INFO_COLUMNS. Returns: - A message indicating the result of the command. If output_file_path is specified, - the message will include the path to the generated CSV file. 
- Otherwise, it will return the result as a string. + str: A message indicating the result of the command. If output_file_path is specified, the message will + include the path to the generated CSV file. Otherwise, it will return the result as a string. Raises: Exception: If neither ids, urls, nor input_file_path is provided. """ + ids = kwargs.get("ids", []) urls = kwargs.get("urls", []) input_file_path = kwargs.get("input_file_path") From e8ab076bfbb14ac39a4abf29c9eef29a314c10ee Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 27 Jun 2024 22:22:26 -0300 Subject: [PATCH 046/115] Add updates docstrings --- youtool/commands/channel_id.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/youtool/commands/channel_id.py b/youtool/commands/channel_id.py index d42f311..c599982 100644 --- a/youtool/commands/channel_id.py +++ b/youtool/commands/channel_id.py @@ -21,7 +21,7 @@ class ChannelId(Command): CHANNEL_ID_COLUMN_NAME: str = "channel_id" @classmethod - def execute(cls, **kwargs) -> str: # noqa: D417 + def execute(cls, **kwargs) -> str: """Execute the channel-id command to fetch YouTube channel IDs from URLs and save them to a CSV file. This method retrieves YouTube channel IDs from a list of provided URLs or from a file containing URLs. From cac8aae4eea11d68f57b48fbeb887e3d6026b754 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 27 Jun 2024 22:51:39 -0300 Subject: [PATCH 047/115] - Add updates --- youtool/commands/channel_id.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/youtool/commands/channel_id.py b/youtool/commands/channel_id.py index c599982..d42f311 100644 --- a/youtool/commands/channel_id.py +++ b/youtool/commands/channel_id.py @@ -21,7 +21,7 @@ class ChannelId(Command): CHANNEL_ID_COLUMN_NAME: str = "channel_id" @classmethod - def execute(cls, **kwargs) -> str: + def execute(cls, **kwargs) -> str: # noqa: D417 """Execute the channel-id command to fetch YouTube channel IDs from URLs and save them to a CSV file. This method retrieves YouTube channel IDs from a list of provided URLs or from a file containing URLs. 
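The `VideoInfo.execute` docstring revised above describes a keyword-only interface; here is a minimal usage sketch under those assumptions (the API key and output path are placeholders, the video IDs are the ones used in the test fixtures, and a valid YouTube Data API key would be needed for a real run):

    from youtool.commands import VideoInfo

    # Either `ids`/`urls` or `input_file_path` must be supplied, plus an API key.
    # With `output_file_path` set, the return value is the path to the generated CSV;
    # without it, the CSV content is returned as a string.
    result = VideoInfo.execute(
        ids=["tmrhPou85HQ", "qoI_x9fylaw"],  # IDs borrowed from the tests above
        api_key="YOUR_YOUTUBE_API_KEY",      # placeholder
        output_file_path="videos.csv",       # placeholder; omit to get the CSV text back
    )
    print(result)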
From 797b4cb8eacaa076e36aed086c9001fe1ca39085 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Tue, 2 Jul 2024 00:59:00 -0300 Subject: [PATCH 048/115] Add test for base file --- tests/commands/test_base.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/commands/test_base.py b/tests/commands/test_base.py index cc26e43..0f15a64 100644 --- a/tests/commands/test_base.py +++ b/tests/commands/test_base.py @@ -2,6 +2,8 @@ import argparse import pytest +from io import StringIO +from datetime import datetime from pathlib import Path from unittest.mock import MagicMock, patch, mock_open from youtool.commands import Command @@ -190,4 +192,4 @@ def test_filter_fields(): 'subscribers': 1000 } - assert filtered_info == expected_result, f"Expected {expected_result}, but got {filtered_info}" \ No newline at end of file + assert filtered_info == expected_result, f"Expected {expected_result}, but got {filtered_info}" From 3061ca8aa2c31b0c9ead09ee112dde696fa0205f Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Tue, 2 Jul 2024 12:40:16 -0300 Subject: [PATCH 049/115] Add update --- tests/commands/test_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/commands/test_base.py b/tests/commands/test_base.py index 0f15a64..04194b6 100644 --- a/tests/commands/test_base.py +++ b/tests/commands/test_base.py @@ -113,7 +113,7 @@ def test_data_from_csv_column_not_found(mock_csv_file): file_path = Path("tests/resources/csv_column_not_found.csv") with pytest.raises(Exception) as exc_info: Command.data_from_csv(file_path, "NonExistentColumn") - assert "Column NonExistentColumn not found on tests/resources/csv_column_not_found.csv" in str(exc_info.value) + assert f"Column NonExistentColumn not found on {file_path}" in str(exc_info.value) @pytest.fixture From ca3edc14dad19e295c08585a13d8483bbf6d403a Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Tue, 2 Jul 2024 23:41:22 -0300 Subject: [PATCH 050/115] Add video_search command --- youtool/commands/video_search.py | 107 +++++++++++++++++++++++++++++++ 1 file changed, 107 insertions(+) create mode 100644 youtool/commands/video_search.py diff --git a/youtool/commands/video_search.py b/youtool/commands/video_search.py new file mode 100644 index 0000000..ce1d0fa --- /dev/null +++ b/youtool/commands/video_search.py @@ -0,0 +1,107 @@ +import csv + +from typing import List, Dict, Optional, Self + +from youtool import YouTube + +from .base import Command + + +class VideoSearch(Command): + """Search video info from a list of IDs or URLs (or CSV filename with URLs/IDs inside), generate CSV output (simplified video dict schema or option to get full video info) + """ + name = "video-search" + arguments = [ + {"name": "--ids", "type": str, "help": "Video IDs", "nargs": "*"}, + {"name": "--urls", "type": str, "help": "Video URLs", "nargs": "*"}, + {"name": "--input-file-path", "type": str, "help": "Input CSV file path with URLs/IDs"}, + {"name": "--output-file-path", "type": str, "help": "Output CSV file path"}, + {"name": "--full-info", "type": bool, "help": "Option to get full video info", "default": False} + ] + + ID_COLUMN_NAME: str = "video_id" + URL_COLUMN_NAME: str = "video_url" + INFO_COLUMNS: List[str] = [ + "id", "title", "published_at", "view_count" + ] + FULL_INFO_COLUMNS: List[str] = [ + "id", "title", "description", "published_at", "view_count", "like_count", "comment_count" + ] + + @staticmethod + def filter_fields(video_info: Dict, info_columns: Optional[List] = None) -> Dict: + """Filters the fields of a dictionary 
containing video information based on specified columns. + + Args: + video_info (Dict): A dictionary containing video information. + info_columns (Optional[List], optional): A list specifying which fields to include in the filtered output. + If None, returns the entire video_info dictionary. Defaults to None. + + Returns: + A dictionary containing only the fields specified in info_columns (if provided) + or the entire video_info dictionary if info_columns is None. + """ + return { + field: value for field, value in video_info.items() if field in info_columns + } if info_columns else video_info + + @classmethod + def execute(cls: Self, **kwargs) -> str: + """ + Execute the video-search command to fetch YouTube video information from IDs or URLs and save them to a CSV file. + + Args: + ids (list[str], optional): A list of YouTube video IDs. If not provided, input_file_path must be specified. + urls (list[str], optional): A list of YouTube video URLs. If not provided, input_file_path must be specified. + input_file_path (str, optional): Path to a CSV file containing YouTube video URLs or IDs. + output_file_path (str, optional): Path to the output CSV file where video information will be saved. + api_key (str): The API key to authenticate with the YouTube Data API. + full_info (bool, optional): Flag to indicate whether to get full video info. Default is False. + + Returns: + A message indicating the result of the command. If output_file_path is specified, + the message will include the path to the generated CSV file. + Otherwise, it will return the result as a string. + + Raises: + Exception: If neither ids, urls, nor input_file_path is provided. + """ + ids = kwargs.get("ids") + urls = kwargs.get("urls") + input_file_path = kwargs.get("input_file_path") + output_file_path = kwargs.get("output_file_path") + api_key = kwargs.get("api_key") + full_info = kwargs.get("full_info", False) + + info_columns = VideoSearch.FULL_INFO_COLUMNS if full_info else VideoSearch.SIMPLE_INFO_COLUMNS + + if input_file_path: + with open(input_file_path, mode='r') as infile: + reader = csv.DictReader(infile) + for row in reader: + if cls.ID_COLUMN_NAME in row and row[cls.ID_COLUMN_NAME]: + ids.append(row[cls.ID_COLUMN_NAME]) + elif cls.URL_COLUMN_NAME in row and row[cls.URL_COLUMN_NAME]: + urls.append(row[cls.URL_COLUMN_NAME]) + + if not ids and not urls: + raise Exception("Either 'ids' or 'urls' must be provided for the video-search command") + + youtube = YouTube([api_key], disable_ipv6=True) + + videos_infos = [] + + if ids: + videos_infos += list(youtube.videos_infos(ids)) + if urls: + # TODO: add get videos_infos using urls to youtool + raise NotImplementedError("videos_infos by url not implemented yet") + + return cls.data_to_csv( + data=[ + VideoSearch.filter_fields( + video_info, info_columns + ) for video_info in videos_infos + ], + output_file_path=output_file_path + ) From fb6391e32ec9cfec1a24d94b3346955f089f3b4d Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Wed, 3 Jul 2024 15:28:21 -0300 Subject: [PATCH 051/115] Fix --- youtool/commands/__init__.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/youtool/commands/__init__.py b/youtool/commands/__init__.py index 51afcfc..222d48e 100644 --- a/youtool/commands/__init__.py +++ b/youtool/commands/__init__.py @@ -2,13 +2,15 @@ from .channel_id import ChannelId from .channel_info import ChannelInfo from .video_info import VideoInfo +from .video_search import VideoSearch COMMANDS = [ ChannelId, ChannelInfo, - VideoInfo + VideoInfo, + 
VideoSearch ] __all__ = [ - COMMANDS, ChannelId, ChannelInfo, VideoInfo + "Command", "COMMANDS", "ChannelId", "ChannelInfo", "VideoInfo", "VideoSearch" ] From 301a2e00edc072ac741c8cfbb10f09f2bce51591 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Wed, 3 Jul 2024 15:28:55 -0300 Subject: [PATCH 052/115] Add update --- youtool/commands/video_search.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/youtool/commands/video_search.py b/youtool/commands/video_search.py index ce1d0fa..390ce04 100644 --- a/youtool/commands/video_search.py +++ b/youtool/commands/video_search.py @@ -73,7 +73,7 @@ def execute(cls: Self, **kwargs) -> str: api_key = kwargs.get("api_key") full_info = kwargs.get("full_info", False) - info_columns = VideoSearch.FULL_INFO_COLUMNS if full_info else VideoSearch.SIMPLE_INFO_COLUMNS + info_columns = VideoSearch.FULL_INFO_COLUMNS if full_info else VideoSearch.INFO_COLUMNS if input_file_path: with open(input_file_path, mode='r') as infile: From 68d5ea5bacdb3ee181501710969dfd788b61d770 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Wed, 3 Jul 2024 20:23:32 -0300 Subject: [PATCH 053/115] add video-search from url case --- youtool/commands/video_search.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/youtool/commands/video_search.py b/youtool/commands/video_search.py index 390ce04..db49ea7 100644 --- a/youtool/commands/video_search.py +++ b/youtool/commands/video_search.py @@ -66,8 +66,8 @@ def execute(cls: Self, **kwargs) -> str: Raises: Exception: If neither ids, urls, nor input_file_path is provided. """ - ids = kwargs.get("ids") - urls = kwargs.get("urls") + ids = kwargs.get("ids", []) + urls = kwargs.get("urls", []) input_file_path = kwargs.get("input_file_path") output_file_path = kwargs.get("output_file_path") api_key = kwargs.get("api_key") @@ -89,13 +89,12 @@ def execute(cls: Self, **kwargs) -> str: youtube = YouTube([api_key], disable_ipv6=True) - videos_infos = [] - - if ids: - videos_infos += list(youtube.videos_infos(ids)) if urls: - # TODO: add get videos_infos using urls to youtool - raise NotImplementedError("videos_infos by url not implemented yet") + ids += [cls.video_id_from_url(url) for url in urls] + + # Remove duplicated + ids = list(set(ids)) + videos_infos = list(youtube.videos_infos([_id for _id in ids if _id])) return cls.data_to_csv( data=[ From 6ab1762d3bb04081f34fc8274dc0fc2f853ce24a Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 4 Jul 2024 13:57:55 -0300 Subject: [PATCH 054/115] - Add test for channel_info command; - Add update channel_info file; - fix test_base --- tests/commands/test_base.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/commands/test_base.py b/tests/commands/test_base.py index 04194b6..afbcf06 100644 --- a/tests/commands/test_base.py +++ b/tests/commands/test_base.py @@ -2,8 +2,6 @@ import argparse import pytest -from io import StringIO -from datetime import datetime from pathlib import Path from unittest.mock import MagicMock, patch, mock_open from youtool.commands import Command From 937ad3dc0b7b29c0fb01e3d2cc57f786940e689a Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 4 Jul 2024 21:32:42 -0300 Subject: [PATCH 055/115] - Add test for video_search command; - Add updates to some test files; - created conftest file --- tests/commands/conftest.py | 29 +++++++++++++ tests/commands/test_channel_info.py | 4 +- tests/commands/test_video_search.py | 66 +++++++++++++++++++++++++++++ youtool/commands/base.py | 2 +- youtool/commands/video_search.py | 44 
+++++++------------ 5 files changed, 113 insertions(+), 32 deletions(-) create mode 100644 tests/commands/conftest.py create mode 100644 tests/commands/test_video_search.py diff --git a/tests/commands/conftest.py b/tests/commands/conftest.py new file mode 100644 index 0000000..9970eab --- /dev/null +++ b/tests/commands/conftest.py @@ -0,0 +1,29 @@ +import pytest + + +@pytest.fixture +def channels_urls(): + return [ + "https://www.youtube.com/@Turicas/featured", + "https://www.youtube.com/c/PythonicCaf%C3%A9" + ] + + +@pytest.fixture +def videos_ids(): + return [ + "video_id_1", + "video_id_2" + ] + + +@pytest.fixture +def videos_urls(videos_ids): + return [ + f"https://www.youtube.com/?v={video_id}" for video_id in videos_ids + ] + + +@pytest.fixture +def usernames(): + return ["Turicas", "PythonicCafe"] diff --git a/tests/commands/test_channel_info.py b/tests/commands/test_channel_info.py index 67f5c3d..55c17d1 100644 --- a/tests/commands/test_channel_info.py +++ b/tests/commands/test_channel_info.py @@ -52,10 +52,10 @@ def test_channel_ids_from_urls_and_usernames(mocker): youtube_mock.return_value.channel_id_from_username = channel_id_from_username_mock youtube_mock.return_value.channels_infos = channels_infos_mock - ChannelInfo.execute(urls=urls, usernames=usernames) + ChannelInfo.execute(urls=channels_urls, usernames=usernames) channel_id_from_url_mock.assert_has_calls( - [call(url) for url in urls] + [call(url) for url in channels_urls] ) channel_id_from_username_mock.assert_has_calls( [call(username) for username in usernames] diff --git a/tests/commands/test_video_search.py b/tests/commands/test_video_search.py new file mode 100644 index 0000000..feeb7ef --- /dev/null +++ b/tests/commands/test_video_search.py @@ -0,0 +1,66 @@ +import csv +import pytest + +from io import StringIO +from unittest.mock import Mock, call +from datetime import datetime + +from youtool.commands.video_search import VideoSearch + + +def test_video_search_string_output(mocker, videos_ids, videos_urls): + youtube_mock = mocker.patch("youtool.commands.video_search.YouTube") + expected_videos_infos = [ + { + column: f"v_{index}" for column in VideoSearch.INFO_COLUMNS + } for index, _ in enumerate(videos_ids) + ] + + csv_file = StringIO() + csv_writer = csv.DictWriter(csv_file, fieldnames=VideoSearch.INFO_COLUMNS) + csv_writer.writeheader() + csv_writer.writerows(expected_videos_infos) + + videos_infos_mock = Mock(return_value=expected_videos_infos) + youtube_mock.return_value.videos_infos = videos_infos_mock + + result = VideoSearch.execute(ids=videos_ids, urls=videos_urls) + + videos_infos_mock.assert_called_once_with(list(set(videos_ids))) + assert result == csv_file.getvalue() + + +def test_video_search_file_output(mocker, videos_ids, videos_urls, tmp_path): + youtube_mock = mocker.patch("youtool.commands.video_search.YouTube") + expected_videos_infos = [ + { + column: f"v_{index}" for column in VideoSearch.INFO_COLUMNS + } for index, _ in enumerate(videos_ids) + ] + + expected_csv_file = StringIO() + csv_writer = csv.DictWriter(expected_csv_file, fieldnames=VideoSearch.INFO_COLUMNS) + csv_writer.writeheader() + csv_writer.writerows(expected_videos_infos) + + timestamp = datetime.now().strftime("%f") + output_file_name = f"output_{timestamp}.csv" + output_file_path = tmp_path / output_file_name + + videos_infos_mock = Mock(return_value=expected_videos_infos) + youtube_mock.return_value.videos_infos = videos_infos_mock + + result_file_path = VideoSearch.execute( + ids=videos_ids, urls=videos_urls, 
output_file_path=output_file_path + ) + + with open(result_file_path, "r") as result_csv_file: + result_csv = result_csv_file.read() + + videos_infos_mock.assert_called_once_with(list(set(videos_ids))) + assert result_csv.replace("\r", "") == expected_csv_file.getvalue().replace("\r", "") + + +def test_video_search_no_id_and_url_error(): + with pytest.raises(Exception, match="Either 'ids' or 'urls' must be provided"): + VideoSearch.execute(ids=None, urls=None) diff --git a/youtool/commands/base.py b/youtool/commands/base.py index a2ac387..20e1708 100644 --- a/youtool/commands/base.py +++ b/youtool/commands/base.py @@ -105,7 +105,7 @@ def data_from_csv(file_path: Path, data_column_name: Optional[str] = None) -> Li if fieldnames is None: raise ValueError("Fieldnames is None") - + if data_column_name not in fieldnames: raise Exception(f"Column {data_column_name} not found on {file_path}") for row in reader: diff --git a/youtool/commands/video_search.py b/youtool/commands/video_search.py index db49ea7..8864aae 100644 --- a/youtool/commands/video_search.py +++ b/youtool/commands/video_search.py @@ -8,7 +8,9 @@ class VideoSearch(Command): - """Search video info from a list of IDs or URLs (or CSV filename with URLs/IDs inside), generate CSV output (simplified video dict schema or option to get full video info) + """ + Search video info from a list of IDs or URLs (or CSV filename with URLs/IDs inside), + generate CSV output (simplified video dict schema or option to get full video info) """ name = "video-search" arguments = [ @@ -16,7 +18,9 @@ class VideoSearch(Command): {"name": "--urls", "type": str, "help": "Video URLs", "nargs": "*"}, {"name": "--input-file-path", "type": str, "help": "Input CSV file path with URLs/IDs"}, {"name": "--output-file-path", "type": str, "help": "Output CSV file path"}, - {"name": "--full-info", "type": bool, "help": "Option to get full video info", "default": False} + {"name": "--full-info", "type": bool, "help": "Option to get full video info", "default": False}, + {"name": "--url-column-name", "type": str, "help": "URL column name on csv input files"}, + {"name": "--id-column-name", "type": str, "help": "Channel ID column name on csv output files"} ] ID_COLUMN_NAME: str = "video_id" @@ -28,23 +32,6 @@ class VideoSearch(Command): "id", "title", "description", "published_at", "view_count", "like_count", "comment_count" ] - @staticmethod - def filter_fields(video_info: Dict, info_columns: Optional[List] = None) -> Dict: - """Filters the fields of a dictionary containing video information based on specified columns. - - Args: - video_info (Dict): A dictionary containing video information. - info_columns (Optional[List], optional): A list specifying which fields to include in the filtered output. - If None, returns the entire video_info dictionary. Defaults to None. - - Returns: - A dictionary containing only the fields specified in info_columns (if provided) - or the entire video_info dictionary if info_columns is None. 
- """ - return { - field: value for field, value in video_info.items() if field in info_columns - } if info_columns else video_info - @classmethod def execute(cls: Self, **kwargs) -> str: """ @@ -68,21 +55,20 @@ def execute(cls: Self, **kwargs) -> str: """ ids = kwargs.get("ids", []) urls = kwargs.get("urls", []) - input_file_path = kwargs.get("input_file_path") output_file_path = kwargs.get("output_file_path") api_key = kwargs.get("api_key") full_info = kwargs.get("full_info", False) + url_column_name = kwargs.get("url_column_name", cls.URL_COLUMN_NAME) + id_column_name = kwargs.get("id_column_name", cls.ID_COLUMN_NAME) + info_columns = VideoSearch.FULL_INFO_COLUMNS if full_info else VideoSearch.INFO_COLUMNS - if input_file_path: - with open(input_file_path, mode='r') as infile: - reader = csv.DictReader(infile) - for row in reader: - if cls.ID_COLUMN_NAME in row and row[cls.ID_COLUMN_NAME]: - ids.append(row[cls.ID_COLUMN_NAME]) - elif cls.URL_COLUMN_NAME in row and row[cls.URL_COLUMN_NAME]: - urls.append(row[cls.URL_COLUMN_NAME]) + if (input_file_path := kwargs.get("input_file_path")): + if (urls_from_csv := cls.data_from_csv(input_file_path, url_column_name)): + ids += [cls.video_id_from_url(url) for url in urls_from_csv] + if (ids_from_csv := cls.data_from_csv(input_file_path, id_column_name)): + ids += ids_from_csv if not ids and not urls: raise Exception("Either 'ids' or 'urls' must be provided for the video-search command") @@ -95,7 +81,7 @@ def execute(cls: Self, **kwargs) -> str: # Remove duplicated ids = list(set(ids)) videos_infos = list(youtube.videos_infos([_id for _id in ids if _id])) - + return cls.data_to_csv( data=[ VideoSearch.filter_fields( From 00a30972113a5251ea62720eeb72c5fcfde85721 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 15:58:47 -0300 Subject: [PATCH 056/115] add docstrings --- tests/commands/test_video_search.py | 20 +++++++++++++++++++- youtool/commands/video_search.py | 4 +++- 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/tests/commands/test_video_search.py b/tests/commands/test_video_search.py index feeb7ef..8b0572c 100644 --- a/tests/commands/test_video_search.py +++ b/tests/commands/test_video_search.py @@ -2,13 +2,18 @@ import pytest from io import StringIO -from unittest.mock import Mock, call +from unittest.mock import Mock from datetime import datetime from youtool.commands.video_search import VideoSearch def test_video_search_string_output(mocker, videos_ids, videos_urls): + """Test the execution of the video-search command and verify the output as string. + + This test simulates the execution of the `VideoSearch.execute` command with a list of video IDs and URLs, + and checks if the output is correctly formatted as a CSV string. + """ youtube_mock = mocker.patch("youtool.commands.video_search.YouTube") expected_videos_infos = [ { @@ -31,6 +36,11 @@ def test_video_search_string_output(mocker, videos_ids, videos_urls): def test_video_search_file_output(mocker, videos_ids, videos_urls, tmp_path): + """Test the execution of the video-search command and verify the output to a file. + + This test simulates the execution of the `VideoSearch.execute` command with a list of video IDs and URLs, + and checks if the output is correctly written to a CSV file. 
+ """ youtube_mock = mocker.patch("youtool.commands.video_search.YouTube") expected_videos_infos = [ { @@ -62,5 +72,13 @@ def test_video_search_file_output(mocker, videos_ids, videos_urls, tmp_path): def test_video_search_no_id_and_url_error(): + """Test if the video-search command raises an exception when neither IDs nor URLs are provided. + + This test checks if executing the `VideoSearch.execute` command without providing IDs or URLs + raises the expected exception. + + Assertions: + - Assert that the raised exception matches the expected error message. + """ with pytest.raises(Exception, match="Either 'ids' or 'urls' must be provided"): VideoSearch.execute(ids=None, urls=None) diff --git a/youtool/commands/video_search.py b/youtool/commands/video_search.py index 8864aae..4713a84 100644 --- a/youtool/commands/video_search.py +++ b/youtool/commands/video_search.py @@ -44,9 +44,11 @@ def execute(cls: Self, **kwargs) -> str: output_file_path (str, optional): Path to the output CSV file where video information will be saved. api_key (str): The API key to authenticate with the YouTube Data API. full_info (bool, optional): Flag to indicate whether to get full video info. Default is False. + url_column_name (str, optional): The name of the column in the input CSV file that contains the URLs. Default is "video_url". + id_column_name (str, optional): The name of the column in the input CSV file that contains the IDs. Default is "video_id". Returns: - A message indicating the result of the command. If output_file_path is specified, + str: A message indicating the result of the command. If output_file_path is specified, the message will include the path to the generated CSV file. Otherwise, it will return the result as a string. From 0552a621490776927e33b84238debd01b5559a40 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 23:01:10 -0300 Subject: [PATCH 057/115] add updates channel_info test --- tests/commands/test_channel_info.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/commands/test_channel_info.py b/tests/commands/test_channel_info.py index 55c17d1..62623e8 100644 --- a/tests/commands/test_channel_info.py +++ b/tests/commands/test_channel_info.py @@ -31,7 +31,7 @@ def test_filter_fields(): assert filtered_info == expected_result, f"Expected {expected_result}, but got {filtered_info}" -def test_channel_ids_from_urls_and_usernames(mocker): +def test_channel_ids_from_urls_and_usernames(mocker, channels_urls): """Test to verify fetching channel IDs from both URLs and usernames. 
This test checks if the `execute` method of the `ChannelInfo` class correctly fetches channel IDs From 4dc8f34796a2b28806a7a7d91356affb08a3f9f0 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Wed, 3 Jul 2024 00:06:54 -0300 Subject: [PATCH 058/115] Remove unnecessary comment --- youtool/cli.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/youtool/cli.py b/youtool/cli.py index 7a430e5..28b055c 100644 --- a/youtool/cli.py +++ b/youtool/cli.py @@ -25,8 +25,6 @@ def main(): subparsers = parser.add_subparsers(required=True, dest="command", title="Command", help="Command to be executed") - # cmd_video_search = subparsers.add_parser("video-search", help="Get video info from a list of IDs or URLs (or CSV filename with URLs/IDs inside), generate CSV output (simplified `video` dict schema or option to get full video info after)") - # cmd_video_comments = subparsers.add_parser("video-comments", help="Get comments from a video ID, generate CSV output (same schema for `comment` dicts)") # cmd_video_livechat = subparsers.add_parser("video-livechat", help="Get comments from a video ID, generate CSV output (same schema for `chat_message` dicts)") # cmd_video_transcriptions = subparsers.add_parser("video-transcription", help="Download video transcriptions based on language code, path and list of video IDs or URLs (or CSV filename with URLs/IDs inside), download files to destination and report results") From cfa05328904ac7966f372ac40f26937f37135fa7 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Wed, 3 Jul 2024 00:07:41 -0300 Subject: [PATCH 059/115] Add video_comments command --- youtool/commands/video_comments.py | 47 ++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 youtool/commands/video_comments.py diff --git a/youtool/commands/video_comments.py b/youtool/commands/video_comments.py new file mode 100644 index 0000000..ec07e18 --- /dev/null +++ b/youtool/commands/video_comments.py @@ -0,0 +1,47 @@ +import csv +from typing import List, Dict, Optional, Self + +from youtool import YouTube +from .base import Command + +class VideoComments(Command): + """Get comments from a video ID, generate CSV output (same schema for comment dicts)""" + + name = "video-comments" + arguments = [ + {"name": "--id", "type": str, "help": "Video ID", "required": True}, + {"name": "--output-file-path", "type": str, "help": "Output CSV file path"} + ] + + COMMENT_COLUMNS: List[str] = [ + "comment_id", "author_display_name", "text_display", "like_count", "published_at" + ] + + @classmethod + def execute(cls: Self, **kwargs) -> str: + """ + Execute the get-comments command to fetch comments from a YouTube video and save them to a CSV file. + + Args: + id (str): The ID of the YouTube video. + output_file_path (str): Path to the output CSV file where comments will be saved. + api_key (str): The API key to authenticate with the YouTube Data API. + + Returns: + A message indicating the result of the command. If output_file_path is specified, + the message will include the path to the generated CSV file. + Otherwise, it will return the result as a string. 
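+
+        Example (illustrative sketch only; the video ID, API key and output path are placeholders):
+            VideoComments.execute(id="VIDEO_ID", api_key="YOUR_KEY", output_file_path="comments.csv")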
+ """ + video_id = kwargs.get("id") + output_file_path = kwargs.get("output_file_path") + api_key = kwargs.get("api_key") + + youtube = YouTube([api_key], disable_ipv6=True) + + comments = list(youtube.video_comments(video_id)) + + return cls.data_to_csv( + data=comments, + output_file_path=output_file_path + ) + \ No newline at end of file From 221770cbc1f440a9c071f04433e6ab632b167a13 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Wed, 3 Jul 2024 16:26:02 -0300 Subject: [PATCH 060/115] Add update --- youtool/commands/__init__.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/youtool/commands/__init__.py b/youtool/commands/__init__.py index 222d48e..72913ce 100644 --- a/youtool/commands/__init__.py +++ b/youtool/commands/__init__.py @@ -3,14 +3,16 @@ from .channel_info import ChannelInfo from .video_info import VideoInfo from .video_search import VideoSearch +from .video_comments import VideoComments COMMANDS = [ ChannelId, ChannelInfo, VideoInfo, - VideoSearch + VideoSearch, + VideoComments ] __all__ = [ - "Command", "COMMANDS", "ChannelId", "ChannelInfo", "VideoInfo", "VideoSearch" + "Command", "COMMANDS", "ChannelId", "ChannelInfo", "VideoInfo", "VideoSearch", "VideoComments" ] From cfcccbcb3c57b288fefc53d6956470b05beac347 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 4 Jul 2024 21:32:42 -0300 Subject: [PATCH 061/115] - Add test for video_search command; - Add updates to some test files; - created conftest file --- tests/commands/test_video_search.py | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/tests/commands/test_video_search.py b/tests/commands/test_video_search.py index 8b0572c..912bcde 100644 --- a/tests/commands/test_video_search.py +++ b/tests/commands/test_video_search.py @@ -2,18 +2,25 @@ import pytest from io import StringIO +<<<<<<< HEAD from unittest.mock import Mock +======= +from unittest.mock import Mock, call +>>>>>>> 0e02e77 (- Add test for video_search command;) from datetime import datetime from youtool.commands.video_search import VideoSearch def test_video_search_string_output(mocker, videos_ids, videos_urls): +<<<<<<< HEAD """Test the execution of the video-search command and verify the output as string. This test simulates the execution of the `VideoSearch.execute` command with a list of video IDs and URLs, and checks if the output is correctly formatted as a CSV string. """ +======= +>>>>>>> 0e02e77 (- Add test for video_search command;) youtube_mock = mocker.patch("youtool.commands.video_search.YouTube") expected_videos_infos = [ { @@ -36,11 +43,14 @@ def test_video_search_string_output(mocker, videos_ids, videos_urls): def test_video_search_file_output(mocker, videos_ids, videos_urls, tmp_path): +<<<<<<< HEAD """Test the execution of the video-search command and verify the output to a file. This test simulates the execution of the `VideoSearch.execute` command with a list of video IDs and URLs, and checks if the output is correctly written to a CSV file. """ +======= +>>>>>>> 0e02e77 (- Add test for video_search command;) youtube_mock = mocker.patch("youtool.commands.video_search.YouTube") expected_videos_infos = [ { @@ -72,6 +82,7 @@ def test_video_search_file_output(mocker, videos_ids, videos_urls, tmp_path): def test_video_search_no_id_and_url_error(): +<<<<<<< HEAD """Test if the video-search command raises an exception when neither IDs nor URLs are provided. 
This test checks if executing the `VideoSearch.execute` command without providing IDs or URLs @@ -80,5 +91,7 @@ def test_video_search_no_id_and_url_error(): Assertions: - Assert that the raised exception matches the expected error message. """ +======= +>>>>>>> 0e02e77 (- Add test for video_search command;) with pytest.raises(Exception, match="Either 'ids' or 'urls' must be provided"): VideoSearch.execute(ids=None, urls=None) From 4112d021b7c305004c06cb08e5ce501e7b0608eb Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 4 Jul 2024 22:22:02 -0300 Subject: [PATCH 062/115] - Add test for video_comments command --- tests/commands/test_video_comments.py | 59 +++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) create mode 100644 tests/commands/test_video_comments.py diff --git a/tests/commands/test_video_comments.py b/tests/commands/test_video_comments.py new file mode 100644 index 0000000..fc19164 --- /dev/null +++ b/tests/commands/test_video_comments.py @@ -0,0 +1,59 @@ +import csv +import pytest + +from io import StringIO +from datetime import datetime +from unittest.mock import Mock +from youtool.commands import VideoComments + + +def test_video_comments(mocker): + youtube_mock = mocker.patch("youtool.commands.video_comments.YouTube") + video_id = "video_id_mock" + + expected_result = [ + {"text": "my_comment", "author": "my_name"} + ] + + csv_file = StringIO() + csv_writer = csv.DictWriter(csv_file, fieldnames=expected_result[0].keys()) + csv_writer.writeheader() + csv_writer.writerows(expected_result) + + videos_comments_mock = Mock(return_value=expected_result) + youtube_mock.return_value.video_comments = videos_comments_mock + result = VideoComments.execute(id=video_id) + + videos_comments_mock.assert_called_once_with(video_id) + + assert result == csv_file.getvalue() + + +def test_video_comments_with_file_output(mocker, tmp_path): + youtube_mock = mocker.patch("youtool.commands.video_comments.YouTube") + video_id = "video_id_mock" + + expected_result = [ + {"text": "my_comment", "author": "my_name"} + ] + + csv_file = StringIO() + csv_writer = csv.DictWriter(csv_file, fieldnames=expected_result[0].keys()) + csv_writer.writeheader() + csv_writer.writerows(expected_result) + + timestamp = datetime.now().strftime("%f") + output_file_name = f"output_{timestamp}.csv" + output_file_path = tmp_path / output_file_name + + videos_comments_mock = Mock(return_value=expected_result) + youtube_mock.return_value.video_comments = videos_comments_mock + + result_file_path = VideoComments.execute(id=video_id, output_file_path=output_file_path) + + with open(result_file_path, "r") as result_csv_file: + result_csv = result_csv_file.read() + + videos_comments_mock.assert_called_once_with(video_id) + + assert result_csv.replace("\r", "") == csv_file.getvalue().replace("\r", "") From 59d1ad513309889c4adfb9e0b440447534231aae Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 16:04:55 -0300 Subject: [PATCH 063/115] add docstrings --- tests/commands/test_video_comments.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tests/commands/test_video_comments.py b/tests/commands/test_video_comments.py index fc19164..386c5de 100644 --- a/tests/commands/test_video_comments.py +++ b/tests/commands/test_video_comments.py @@ -8,6 +8,11 @@ def test_video_comments(mocker): + """Test case for fetching video comments and verifying the output. + + This test mocks the YouTube API to simulate fetching comments for a video, + then compares the generated CSV output with expected comments. 
+ """ youtube_mock = mocker.patch("youtool.commands.video_comments.YouTube") video_id = "video_id_mock" @@ -30,6 +35,11 @@ def test_video_comments(mocker): def test_video_comments_with_file_output(mocker, tmp_path): + """Test case for fetching video comments and saving them to a CSV file. + + This test mocks the YouTube API to simulate fetching comments for a video, + then saves the comments to a temporary CSV file. + """ youtube_mock = mocker.patch("youtool.commands.video_comments.YouTube") video_id = "video_id_mock" From bd78a70468b6f8d77c829830e2e9462077a4fcf8 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Wed, 3 Jul 2024 15:15:30 -0300 Subject: [PATCH 064/115] Add video_livechat command --- youtool/commands/video_livechat.py | 81 ++++++++++++++++++++++++++++++ 1 file changed, 81 insertions(+) create mode 100644 youtool/commands/video_livechat.py diff --git a/youtool/commands/video_livechat.py b/youtool/commands/video_livechat.py new file mode 100644 index 0000000..775b857 --- /dev/null +++ b/youtool/commands/video_livechat.py @@ -0,0 +1,81 @@ +import csv +from typing import List, Dict, Optional, Self +from chat_downloader import ChatDownloader +from chat_downloader.errors import ChatDisabled, LoginRequired, NoChatReplay +from .base import Command +from datetime import datetime + +class VideoLiveChat(Command): + """Get live chat comments from a video ID, generate CSV output (same schema for chat_message dicts)""" + name = "video-livechat" + arguments = [ + {"name": "--id", "type": str, "help": "Video ID", "required": True}, + {"name": "--output-file-path", "type": str, "help": "Output CSV file path"}, + {"name": "--expand-emojis", "type": bool, "help": "Expand emojis in chat messages", "default": True} + ] + + CHAT_COLUMNS: List[str] = [ + "id", "video_id", "created_at", "type", "action", "video_time", + "author", "author_id", "author_image_url", "text", + "money_currency", "money_amount" + ] + + @staticmethod + def parse_timestamp(timestamp: str) -> str: + return datetime.utcfromtimestamp(int(timestamp)).strftime('%Y-%m-%d %H:%M:%S') + + @staticmethod + def parse_decimal(value: Optional[str]) -> Optional[float]: + return float(value.replace(',', '')) if value else None + + @classmethod + def execute(cls: Self, **kwargs) -> str: + """ + Execute the video-livechat command to fetch live chat messages from a YouTube video and save them to a CSV file. + + Args: + id (str): The ID of the YouTube video. + output_file_path (str): Path to the output CSV file where chat messages will be saved. + expand_emojis (bool): Whether to expand emojis in chat messages. Defaults to True. + api_key (str): The API key to authenticate with the YouTube Data API. + + Returns: + A message indicating the result of the command. If output_file_path is specified, + the message will include the path to the generated CSV file. + Otherwise, it will return the result as a string. 
+ """ + video_id = kwargs.get("id") + output_file_path = kwargs.get("output_file_path") + expand_emojis = kwargs.get("expand_emojis", True) + + downloader = ChatDownloader() + video_url = f"https://youtube.com/watch?v={video_id}" + + chat_messages = [] + try: + live = downloader.get_chat(video_url, message_groups=["messages", "superchat"]) + for message in live: + text = message["message"] + if expand_emojis: + for emoji in message.get("emotes", []): + for shortcut in emoji["shortcuts"]: + text = text.replace(shortcut, emoji["id"]) + money = message.get("money", {}) or {} + chat_messages.append({ + "id": message["message_id"], + "video_id": video_id, + "created_at": cls.parse_timestamp(message["timestamp"]), + "type": message["message_type"], + "action": message["action_type"], + "video_time": float(message["time_in_seconds"]), + "author": message["author"]["name"], + "author_id": message["author"]["id"], + "author_image_url": [img for img in message["author"]["images"] if img["id"] == "source"][0]["url"], + "text": text, + "money_currency": money.get("currency"), + "money_amount": cls.parse_decimal(money.get("amount")), + }) + except (LoginRequired, NoChatReplay, ChatDisabled): + raise + + return cls.data_to_csv(chat_messages, output_file_path) From bc966433fe672dab2332de84279e4ae747b911cc Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 4 Jul 2024 21:32:42 -0300 Subject: [PATCH 065/115] - Add test for video_search command; - Add updates to some test files; - created conftest file --- tests/commands/test_video_search.py | 15 ++------------- 1 file changed, 2 insertions(+), 13 deletions(-) diff --git a/tests/commands/test_video_search.py b/tests/commands/test_video_search.py index 912bcde..a30a879 100644 --- a/tests/commands/test_video_search.py +++ b/tests/commands/test_video_search.py @@ -2,25 +2,19 @@ import pytest from io import StringIO -<<<<<<< HEAD from unittest.mock import Mock -======= -from unittest.mock import Mock, call ->>>>>>> 0e02e77 (- Add test for video_search command;) + from datetime import datetime from youtool.commands.video_search import VideoSearch def test_video_search_string_output(mocker, videos_ids, videos_urls): -<<<<<<< HEAD """Test the execution of the video-search command and verify the output as string. This test simulates the execution of the `VideoSearch.execute` command with a list of video IDs and URLs, and checks if the output is correctly formatted as a CSV string. """ -======= ->>>>>>> 0e02e77 (- Add test for video_search command;) youtube_mock = mocker.patch("youtool.commands.video_search.YouTube") expected_videos_infos = [ { @@ -43,14 +37,11 @@ def test_video_search_string_output(mocker, videos_ids, videos_urls): def test_video_search_file_output(mocker, videos_ids, videos_urls, tmp_path): -<<<<<<< HEAD """Test the execution of the video-search command and verify the output to a file. This test simulates the execution of the `VideoSearch.execute` command with a list of video IDs and URLs, and checks if the output is correctly written to a CSV file. """ -======= ->>>>>>> 0e02e77 (- Add test for video_search command;) youtube_mock = mocker.patch("youtool.commands.video_search.YouTube") expected_videos_infos = [ { @@ -82,7 +73,6 @@ def test_video_search_file_output(mocker, videos_ids, videos_urls, tmp_path): def test_video_search_no_id_and_url_error(): -<<<<<<< HEAD """Test if the video-search command raises an exception when neither IDs nor URLs are provided. 
This test checks if executing the `VideoSearch.execute` command without providing IDs or URLs @@ -91,7 +81,6 @@ def test_video_search_no_id_and_url_error(): Assertions: - Assert that the raised exception matches the expected error message. """ -======= ->>>>>>> 0e02e77 (- Add test for video_search command;) + with pytest.raises(Exception, match="Either 'ids' or 'urls' must be provided"): VideoSearch.execute(ids=None, urls=None) From 6db04480509227964f7e20239704cba15dfa1a32 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 4 Jul 2024 22:32:13 -0300 Subject: [PATCH 066/115] - Add test for video_livechat command --- tests/commands/test_video_livechat.py | 59 +++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) create mode 100644 tests/commands/test_video_livechat.py diff --git a/tests/commands/test_video_livechat.py b/tests/commands/test_video_livechat.py new file mode 100644 index 0000000..6f22ad9 --- /dev/null +++ b/tests/commands/test_video_livechat.py @@ -0,0 +1,59 @@ +import csv +import pytest + +from io import StringIO +from datetime import datetime +from unittest.mock import Mock +from youtool.commands import VideoLiveChat + + +def test_video_livechat(mocker): + youtube_mock = mocker.patch("youtool.commands.video_livechat.YouTube") + video_id = "video_id_mock" + + expected_result = [ + {column: "data" for column in VideoLiveChat.CHAT_MESSAGE_COLUMNS} + ] + + csv_file = StringIO() + csv_writer = csv.DictWriter(csv_file, fieldnames=expected_result[0].keys()) + csv_writer.writeheader() + csv_writer.writerows(expected_result) + + videos_livechat_mock = Mock(return_value=expected_result) + youtube_mock.return_value.video_livechat = videos_livechat_mock + result = VideoLiveChat.execute(id=video_id) + + videos_livechat_mock.assert_called_once_with(video_id) + + assert result == csv_file.getvalue() + + +def test_video_livechat_with_file_output(mocker, tmp_path): + youtube_mock = mocker.patch("youtool.commands.video_livechat.YouTube") + video_id = "video_id_mock" + + expected_result = [ + {column: "data" for column in VideoLiveChat.CHAT_MESSAGE_COLUMNS} + ] + + csv_file = StringIO() + csv_writer = csv.DictWriter(csv_file, fieldnames=expected_result[0].keys()) + csv_writer.writeheader() + csv_writer.writerows(expected_result) + + timestamp = datetime.now().strftime("%f") + output_file_name = f"output_{timestamp}.csv" + output_file_path = tmp_path / output_file_name + + videos_livechat_mock = Mock(return_value=expected_result) + youtube_mock.return_value.video_livechat = videos_livechat_mock + + result_file_path = VideoLiveChat.execute(id=video_id, output_file_path=output_file_path) + + with open(result_file_path, "r") as result_csv_file: + result_csv = result_csv_file.read() + + videos_livechat_mock.assert_called_once_with(video_id) + + assert result_csv.replace("\r", "") == csv_file.getvalue().replace("\r", "") From be977aa83a2ec810ae591dbc106f1b0244f6587f Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 16:08:54 -0300 Subject: [PATCH 067/115] add docstrings --- tests/commands/test_video_livechat.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/tests/commands/test_video_livechat.py b/tests/commands/test_video_livechat.py index 6f22ad9..c91db87 100644 --- a/tests/commands/test_video_livechat.py +++ b/tests/commands/test_video_livechat.py @@ -8,6 +8,10 @@ def test_video_livechat(mocker): + """Test case for fetching live chat messages from a YouTube video. 
+ + Mocks the YouTube API to return expected live chat messages and verifies if the execute method correctly formats and returns the data. + """ youtube_mock = mocker.patch("youtool.commands.video_livechat.YouTube") video_id = "video_id_mock" @@ -30,6 +34,10 @@ def test_video_livechat(mocker): def test_video_livechat_with_file_output(mocker, tmp_path): + """Test case for fetching live chat messages from a YouTube video and saving them to a CSV file. + + Mocks the YouTube API to return expected live chat messages and verifies if the execute method correctly saves the data to a CSV file. + """ youtube_mock = mocker.patch("youtool.commands.video_livechat.YouTube") video_id = "video_id_mock" From 93a94da7f6464bc3a68b3159a08592dc69d16475 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Wed, 3 Jul 2024 21:43:28 -0300 Subject: [PATCH 068/115] Add video_transcription command --- youtool/commands/video_transcription.py | 70 +++++++++++++++++++++++++ 1 file changed, 70 insertions(+) create mode 100644 youtool/commands/video_transcription.py diff --git a/youtool/commands/video_transcription.py b/youtool/commands/video_transcription.py new file mode 100644 index 0000000..c0dace9 --- /dev/null +++ b/youtool/commands/video_transcription.py @@ -0,0 +1,70 @@ +import csv +from pathlib import Path +from typing import List, Dict +from .base import Command +from youtool import YouTube + +class VideoTranscription(Command): + """Download video transcriptions based on language code, path, and list of video IDs or URLs (or CSV filename with URLs/IDs inside). + Download files to destination and report results.""" + + name = "video-transcription" + arguments = [ + {"name": "--ids", "type": str, "help": "Video IDs", "nargs": "*"}, + {"name": "--urls", "type": str, "help": "Video URLs", "nargs": "*"}, + {"name": "--input-file-path", "type": str, "help": "CSV file path containing video IDs or URLs"}, + {"name": "--output-dir", "type": str, "help": "Output directory to save transcriptions"}, + {"name": "--language-code", "type": str, "help": "Language code for transcription"}, + {"name": "--api-key", "type": str, "help": "API key for YouTube Data API"}, + ] + + TRANSCRIPTION_COLUMNS: List[str] = [ + "video_id", "transcription_text" + ] + + @classmethod + def execute(cls, **kwargs) -> str: + """ + Execute the video-transcription command to download transcriptions of videos based on IDs or URLs and save them to files. + + Args: + ids: A list of YouTube video IDs. + urls: A list of YouTube video URLs. + input_file_path: Path to a CSV file containing YouTube video IDs or URLs. + output_dir: Directory path to save the transcription files. + language_code: Language code for the transcription language. + api_key: The API key to authenticate with the YouTube Data API. + + Returns: + A message indicating the result of the command. Reports success or failure for each video transcription download. 
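+
+        Example (illustrative sketch only; the video ID, language code, directory and API key are placeholders):
+            VideoTranscription.execute(ids=["VIDEO_ID"], language_code="en", output_dir="transcriptions", api_key="YOUR_KEY")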
+ """ + ids = kwargs.get("ids") + urls = kwargs.get("urls") + input_file_path = kwargs.get("input_file_path") + output_dir = kwargs.get("output_dir") + language_code = kwargs.get("language_code") + api_key = kwargs.get("api_key") + + youtube = YouTube([api_key], disable_ipv6=True) + + if input_file_path: + ids += cls.data_from_csv(Path(input_file_path), "video_id") + + if urls: + ids += [cls.video_id_from_url(url) for url in urls] + + # Remove duplicated + ids = list(set(ids)) + + # youtube.videos_transcriptions(ids, language_code, output_dir) + + results = [] + for video_id in ids: + try: + transcription = youtube.video_transcription(video_id, language_code) + output_file_path = cls.save_transcription_to_file(video_id, transcription, output_dir) + results.append(f"Transcription saved to {output_file_path}") + except Exception as e: + results.append(f"Error processing video {video_id}: {str(e)}") + + return "\n".join(results) \ No newline at end of file From c9433ff4c33943362600bb4fb45e77b3d828ba25 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 01:00:03 -0300 Subject: [PATCH 069/115] - Add test for video_transcription command; - Add some necessary improvements in other files --- tests/commands/test_channel_info.py | 4 +- tests/commands/test_video_transcription.py | 56 ++++++++++++++++++++++ tests/test_cli.py | 13 +++-- youtool/commands/base.py | 11 ++++- youtool/commands/video_transcription.py | 47 ++++++++++-------- 5 files changed, 104 insertions(+), 27 deletions(-) create mode 100644 tests/commands/test_video_transcription.py diff --git a/tests/commands/test_channel_info.py b/tests/commands/test_channel_info.py index 62623e8..3ae97c4 100644 --- a/tests/commands/test_channel_info.py +++ b/tests/commands/test_channel_info.py @@ -60,4 +60,6 @@ def test_channel_ids_from_urls_and_usernames(mocker, channels_urls): channel_id_from_username_mock.assert_has_calls( [call(username) for username in usernames] ) - channels_infos_mock.assert_called_once_with([ids_from_urls_mock, ids_from_usernames_mock]) + channels_infos_mock.assert_called_once() + assert ids_from_usernames_mock in channels_infos_mock.call_args.args[0] + assert ids_from_urls_mock in channels_infos_mock.call_args.args[0] diff --git a/tests/commands/test_video_transcription.py b/tests/commands/test_video_transcription.py new file mode 100644 index 0000000..6912c1b --- /dev/null +++ b/tests/commands/test_video_transcription.py @@ -0,0 +1,56 @@ +from unittest.mock import Mock + +from youtool.commands import VideoTranscription + + +def test_video_transcription(mocker, videos_ids, videos_urls, tmp_path): + youtube_mock = mocker.patch("youtool.commands.video_transcription.YouTube") + + language_code = "pt_br" + + videos_transcriptions_mock = Mock() + youtube_mock.return_value.videos_transcriptions = videos_transcriptions_mock + + for video_id in videos_ids: + open(tmp_path / f"{video_id}.{language_code}.vtt", "a").close() + + result = VideoTranscription.execute( + ids=videos_ids, urls=videos_urls, language_code=language_code, output_dir=tmp_path + ) + + videos_transcriptions_mock.assert_called_once_with( + list(set(videos_ids)), language_code, tmp_path + ) + + for video_id in videos_ids: + assert str(tmp_path / f"{video_id}.{language_code}.vtt") in result + + +def test_video_transcription_input_from_file(mocker, videos_ids, tmp_path): + youtube_mock = mocker.patch("youtool.commands.video_transcription.YouTube") + + language_code = "pt_br" + + videos_transcriptions_mock = Mock() + 
youtube_mock.return_value.videos_transcriptions = videos_transcriptions_mock + + input_file_path = tmp_path / "input_file.csv" + + with open(input_file_path, "w") as input_csv: + input_csv.write("video_id\n" + "\n".join(videos_ids)) + + for video_id in videos_ids: + open(tmp_path / f"{video_id}.{language_code}.vtt", "a").close() + + result = VideoTranscription.execute( + ids=None, urls=None, + language_code=language_code, output_dir=tmp_path, + input_file_path=input_file_path + ) + + videos_transcriptions_mock.assert_called_once_with( + list(set(videos_ids)), language_code, tmp_path + ) + + for video_id in videos_ids: + assert str(tmp_path / f"{video_id}.{language_code}.vtt") in result \ No newline at end of file diff --git a/tests/test_cli.py b/tests/test_cli.py index 9165041..92aa4fa 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -1,5 +1,6 @@ import pytest +from pathlib import Path from subprocess import run from youtool.commands import COMMANDS @@ -17,9 +18,13 @@ def test_missing_api_key(monkeypatch: pytest.MonkeyPatch, command: Command): from the youtool CLI results in an appropriate error message and exit code. """ monkeypatch.delenv('YOUTUBE_API_KEY', raising=False) - cli_path = "youtool/cli.py" - command = ["python", cli_path, command.name] - result = run(command, capture_output=True, text=True, check=False) + cli_path = Path("youtool") / "cli.py" + command_string = ["python", cli_path, command.name] + for arg in command.arguments: + if arg.get("required"): + command_string.append(arg.get("name")) + command_string.append("test_value") + result = run(command_string, capture_output=True, text=True, check=False) assert result.returncode == 2 - assert "YouTube API Key is required" in result.stderr \ No newline at end of file + assert "YouTube API Key is required" in result.stderr diff --git a/youtool/commands/base.py b/youtool/commands/base.py index 20e1708..50068d6 100644 --- a/youtool/commands/base.py +++ b/youtool/commands/base.py @@ -80,7 +80,11 @@ def execute(cls, **kwargs) -> str: # noqa: D417 raise NotImplementedError() @staticmethod - def data_from_csv(file_path: Path, data_column_name: Optional[str] = None) -> List[str]: + def data_from_csv( + file_path: Path, + data_column_name: Optional[str] = None, + raise_column_exception: bool = True + ) -> List[str]: """Extracts a list of URLs from a specified CSV file. 
Args: @@ -107,7 +111,10 @@ def data_from_csv(file_path: Path, data_column_name: Optional[str] = None) -> Li raise ValueError("Fieldnames is None") if data_column_name not in fieldnames: - raise Exception(f"Column {data_column_name} not found on {file_path}") + if raise_column_exception: + raise Exception(f"Column {data_column_name} not found on {file_path}") + return data + for row in reader: value = row.get(data_column_name) if value is not None: diff --git a/youtool/commands/video_transcription.py b/youtool/commands/video_transcription.py index c0dace9..39a2f9f 100644 --- a/youtool/commands/video_transcription.py +++ b/youtool/commands/video_transcription.py @@ -16,11 +16,12 @@ class VideoTranscription(Command): {"name": "--output-dir", "type": str, "help": "Output directory to save transcriptions"}, {"name": "--language-code", "type": str, "help": "Language code for transcription"}, {"name": "--api-key", "type": str, "help": "API key for YouTube Data API"}, + {"name": "--url-column-name", "type": str, "help": "URL column name on csv input files"}, + {"name": "--id-column-name", "type": str, "help": "Channel ID column name on csv output files"} ] - TRANSCRIPTION_COLUMNS: List[str] = [ - "video_id", "transcription_text" - ] + ID_COLUMN_NAME: str = "video_id" + URL_COLUMN_NAME: str = "video_url" @classmethod def execute(cls, **kwargs) -> str: @@ -38,33 +39,39 @@ def execute(cls, **kwargs) -> str: Returns: A message indicating the result of the command. Reports success or failure for each video transcription download. """ - ids = kwargs.get("ids") - urls = kwargs.get("urls") + ids = kwargs.get("ids") or [] + urls = kwargs.get("urls") or [] input_file_path = kwargs.get("input_file_path") output_dir = kwargs.get("output_dir") language_code = kwargs.get("language_code") api_key = kwargs.get("api_key") + url_column_name = kwargs.get("url_column_name", cls.URL_COLUMN_NAME) + id_column_name = kwargs.get("id_column_name", cls.ID_COLUMN_NAME) + youtube = YouTube([api_key], disable_ipv6=True) - if input_file_path: - ids += cls.data_from_csv(Path(input_file_path), "video_id") + if (input_file_path := kwargs.get("input_file_path")): + if (urls_from_csv := cls.data_from_csv(input_file_path, url_column_name, False)): + ids += [cls.video_id_from_url(url) for url in urls_from_csv] + if (ids_from_csv := cls.data_from_csv(input_file_path, id_column_name, False)): + ids += ids_from_csv + + if not ids and not urls: + raise Exception( + "Either 'ids' or 'urls' must be provided for the video-transcription command" + ) if urls: ids += [cls.video_id_from_url(url) for url in urls] # Remove duplicated ids = list(set(ids)) - - # youtube.videos_transcriptions(ids, language_code, output_dir) - - results = [] - for video_id in ids: - try: - transcription = youtube.video_transcription(video_id, language_code) - output_file_path = cls.save_transcription_to_file(video_id, transcription, output_dir) - results.append(f"Transcription saved to {output_file_path}") - except Exception as e: - results.append(f"Error processing video {video_id}: {str(e)}") - - return "\n".join(results) \ No newline at end of file + youtube.videos_transcriptions(ids, language_code, output_dir) + output_dir_path = Path(output_dir) + saved_transcriptions = [ + str( + output_dir_path / f"{v_id}.{language_code}.vtt" + ) for v_id in ids if (output_dir_path / f"{v_id}.{language_code}.vtt").is_file() + ] + return "\n".join(saved_transcriptions) From b1d2fdfecdef5547d57eee76eb2336048ab1daf8 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 
01:22:00 -0300 Subject: [PATCH 070/115] remove comments --- youtool/cli.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/youtool/cli.py b/youtool/cli.py index 28b055c..49dfe12 100644 --- a/youtool/cli.py +++ b/youtool/cli.py @@ -25,9 +25,6 @@ def main(): subparsers = parser.add_subparsers(required=True, dest="command", title="Command", help="Command to be executed") - # cmd_video_livechat = subparsers.add_parser("video-livechat", help="Get comments from a video ID, generate CSV output (same schema for `chat_message` dicts)") - # cmd_video_transcriptions = subparsers.add_parser("video-transcription", help="Download video transcriptions based on language code, path and list of video IDs or URLs (or CSV filename with URLs/IDs inside), download files to destination and report results") - for command in COMMANDS: command.parse_arguments(subparsers) From 160cecd08f99380a282fafb14f90d72332a9ebf0 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 16:16:52 -0300 Subject: [PATCH 071/115] add docstrings --- tests/commands/test_video_transcription.py | 14 ++++++++++++++ youtool/commands/video_transcription.py | 19 ++++++++++--------- 2 files changed, 24 insertions(+), 9 deletions(-) diff --git a/tests/commands/test_video_transcription.py b/tests/commands/test_video_transcription.py index 6912c1b..d3ee1f3 100644 --- a/tests/commands/test_video_transcription.py +++ b/tests/commands/test_video_transcription.py @@ -4,6 +4,13 @@ def test_video_transcription(mocker, videos_ids, videos_urls, tmp_path): + """ + Test the video transcription command. + + This test verifies the functionality of the VideoTranscription.execute method. + It mocks the YouTube API to simulate fetching transcriptions for given video IDs or URLs. + Transcriptions are expected to be saved in VTT format in the specified temporary directory. + """ youtube_mock = mocker.patch("youtool.commands.video_transcription.YouTube") language_code = "pt_br" @@ -27,6 +34,13 @@ def test_video_transcription(mocker, videos_ids, videos_urls, tmp_path): def test_video_transcription_input_from_file(mocker, videos_ids, tmp_path): + """Test the video transcription command with input from a CSV file. + + This test verifies the functionality of the VideoTranscription.execute method when + video IDs are provided via a CSV file. It mocks the YouTube API to simulate fetching + transcriptions for the listed video IDs. Transcriptions are expected to be saved in + VTT format in the specified temporary directory. + """ youtube_mock = mocker.patch("youtool.commands.video_transcription.YouTube") language_code = "pt_br" diff --git a/youtool/commands/video_transcription.py b/youtool/commands/video_transcription.py index 39a2f9f..e895e5a 100644 --- a/youtool/commands/video_transcription.py +++ b/youtool/commands/video_transcription.py @@ -25,19 +25,20 @@ class VideoTranscription(Command): @classmethod def execute(cls, **kwargs) -> str: - """ - Execute the video-transcription command to download transcriptions of videos based on IDs or URLs and save them to files. + """Execute the video-transcription command to download transcriptions of videos based on IDs or URLs and save them to files. Args: - ids: A list of YouTube video IDs. - urls: A list of YouTube video URLs. - input_file_path: Path to a CSV file containing YouTube video IDs or URLs. - output_dir: Directory path to save the transcription files. - language_code: Language code for the transcription language. - api_key: The API key to authenticate with the YouTube Data API. 
+ ids (List[str]): A list of YouTube video IDs. + urls (List[str]): A list of YouTube video URLs. + input_file_path (str): Path to a CSV file containing YouTube video IDs or URLs. + output_dir (str): Directory path to save the transcription files. + language_code (str): Language code for the transcription language. + api_key (str): The API key to authenticate with the YouTube Data API. + url_column_name (str, optional): Column name for URLs in the CSV input file. Defaults to "video_url". + id_column_name (str, optional): Column name for IDs in the CSV output file. Defaults to "video_id". Returns: - A message indicating the result of the command. Reports success or failure for each video transcription download. + str: A message indicating the result of the command. Reports success or failure for each video transcription download. """ ids = kwargs.get("ids") or [] urls = kwargs.get("urls") or [] From cfd3bf16c9dd817037239f0a51de808a1ef4a2b6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Justen=20=28=40turicas=29?= Date: Sun, 19 May 2024 21:28:04 -0300 Subject: [PATCH 072/115] Implement draft CLI module --- youtool/cli.py | 64 ++++++++++++++++++++++++-------------------------- 1 file changed, 31 insertions(+), 33 deletions(-) diff --git a/youtool/cli.py b/youtool/cli.py index 49dfe12..be0bbd0 100644 --- a/youtool/cli.py +++ b/youtool/cli.py @@ -1,45 +1,43 @@ import argparse -import os - -from youtool.commands import COMMANDS def main(): - """Main function for the YouTube CLI Tool. + parser = argparse.ArgumentParser() + parser.add_argument("--api-key") + subparsers = parser.add_subparsers(required=True, dest="command") + + api_key = args.api_key or os.environ.get("YOUTUBE_API_KEY") - This function sets up the argument parser for the CLI tool, including options for the YouTube API key and - command-specific subparsers. It then parses the command-line arguments, retrieving the YouTube API key - from either the command-line argument '--api-key' or the environment variable 'YOUTUBE_API_KEY'. If the API - key is not provided through any means, it raises an argparse.ArgumentError. 
+ cmd_channel_id = subparsers.add_parser("channel-id", help="Get channel IDs from a list of URLs (or CSV filename with URLs inside), generate CSV output (just the IDs)") + cmd_channel_info = subparsers.add_parser("channel-info", help="Get channel info from a list of IDs (or CSV filename with IDs inside), generate CSV output (same schema for `channel` dicts)") + cmd_video_info = subparsers.add_parser("video-info", help="Get video info from a list of IDs or URLs (or CSV filename with URLs/IDs inside), generate CSV output (same schema for `video` dicts)") + cmd_video_search = subparsers.add_parser("video-search", help="Get video info from a list of IDs or URLs (or CSV filename with URLs/IDs inside), generate CSV output (simplified `video` dict schema or option to get full video info after)") + cmd_video_comments = subparsers.add_parser("video-comments", help="Get comments from a video ID, generate CSV output (same schema for `comment` dicts)") + cmd_video_livechat = subparsers.add_parser("video-livechat", help="Get comments from a video ID, generate CSV output (same schema for `chat_message` dicts)") + cmd_video_transcriptions = subparsers.add_parser("video-transcription", help="Download video transcriptions based on language code, path and list of video IDs or URLs (or CSV filename with URLs/IDs inside), download files to destination and report results") - Finally, the function executes the appropriate command based on the parsed arguments. If an exception occurs - during the execution of the command, it is caught and raised as an argparse error for proper handling. + args = parser.parse_args() - Raises: - argparse.ArgumentError: If the YouTube API key is not provided. - argparse.ArgumentError: If there is an error during the execution of the command. 
- """ - parser = argparse.ArgumentParser(description="CLI Tool for managing YouTube videos add playlists") - parser.add_argument("--api-key", type=str, help="YouTube API Key", dest="api_key") - parser.add_argument("--debug", default=False, action="store_true", help="Debug mode", dest="debug") - - subparsers = parser.add_subparsers(required=True, dest="command", title="Command", help="Command to be executed") + if args.command == "channel-id": + print(f"Implement: {args.command}") # TODO: implement - for command in COMMANDS: - command.parse_arguments(subparsers) + elif args.command == "channel-info": + print(f"Implement: {args.command}") # TODO: implement - args = parser.parse_args() - args.api_key = args.api_key or os.environ.get("YOUTUBE_API_KEY") - - if not args.api_key: - parser.error("YouTube API Key is required") - - try: - print(args.func(**args.__dict__)) - except Exception as error: - if args.debug: - raise error - parser.error(error) + elif args.command == "video-info": + print(f"Implement: {args.command}") # TODO: implement + + elif args.command == "video-search": + print(f"Implement: {args.command}") # TODO: implement + + elif args.command == "video-comments": + print(f"Implement: {args.command}") # TODO: implement + + elif args.command == "video-livechat": + print(f"Implement: {args.command}") # TODO: implement + + elif args.command == "video-transcription": + print(f"Implement: {args.command}") # TODO: implement if __name__ == "__main__": From 7ced3994d401671ca7404aef64234ccb897f33b2 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 27 Jun 2024 22:21:05 -0300 Subject: [PATCH 073/115] Add updates docstrings --- youtool/commands/base.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/youtool/commands/base.py b/youtool/commands/base.py index 50068d6..28b6150 100644 --- a/youtool/commands/base.py +++ b/youtool/commands/base.py @@ -80,11 +80,7 @@ def execute(cls, **kwargs) -> str: # noqa: D417 raise NotImplementedError() @staticmethod - def data_from_csv( - file_path: Path, - data_column_name: Optional[str] = None, - raise_column_exception: bool = True - ) -> List[str]: + def data_from_csv(file_path: Path, data_column_name: Optional[str] = None) -> List[str]: """Extracts a list of URLs from a specified CSV file. Args: From 83ecd8b6ee5340acc6e5f0a232bd9bf59c27fff5 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 27 Jun 2024 22:22:26 -0300 Subject: [PATCH 074/115] Add updates docstrings --- youtool/commands/channel_id.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/youtool/commands/channel_id.py b/youtool/commands/channel_id.py index d42f311..c599982 100644 --- a/youtool/commands/channel_id.py +++ b/youtool/commands/channel_id.py @@ -21,7 +21,7 @@ class ChannelId(Command): CHANNEL_ID_COLUMN_NAME: str = "channel_id" @classmethod - def execute(cls, **kwargs) -> str: # noqa: D417 + def execute(cls, **kwargs) -> str: """Execute the channel-id command to fetch YouTube channel IDs from URLs and save them to a CSV file. This method retrieves YouTube channel IDs from a list of provided URLs or from a file containing URLs. 
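The video-transcription hunks earlier in this series call cls.data_from_csv(input_file_path, url_column_name, False) and then again with id_column_name, relying on the new raise_column_exception flag to skip a column that is not present instead of raising. Only fragments of data_from_csv appear in these diffs, so the following is a rough sketch of the behavior those call sites assume, built on the standard csv module; the value-collection step (strip/append) is an assumption, not taken from the patches.

import csv
from pathlib import Path
from typing import List, Optional


def data_from_csv(
    file_path: Path,
    data_column_name: Optional[str] = None,
    raise_column_exception: bool = True,
) -> List[str]:
    # Collect the non-empty values of one column from a CSV file.
    data: List[str] = []
    with Path(file_path).open(newline="") as csv_file:
        reader = csv.DictReader(csv_file)
        fieldnames = reader.fieldnames
        if fieldnames is None:
            raise ValueError("Fieldnames is None")
        if data_column_name not in fieldnames:
            if raise_column_exception:
                raise Exception(f"Column {data_column_name} not found on {file_path}")
            # Silent fallback used by the video-transcription call sites.
            return data
        for row in reader:
            value = row.get(data_column_name)
            if value is not None:
                data.append(value.strip())  # assumed collection step
    return data

With this in place, execute() can merge IDs taken from either a video_url or a video_id column of the same input file without failing when one of the two columns is missing.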
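The youtool/cli.py patches in this series keep switching between the hard-coded if/elif dispatch and the COMMANDS-based main() that ends in print(args.func(**args.__dict__)), but none of the hunks show how func gets bound to the parsed arguments. Below is a minimal sketch of one way that wiring can work with argparse's set_defaults, assuming each command class exposes parse_arguments and execute classmethods as in youtool/commands/base.py; the ExampleCommand class and its --urls option are illustrative stand-ins, not the project's real command implementations.

import argparse
import os


class ExampleCommand:
    name = "example"

    @classmethod
    def parse_arguments(cls, subparsers):
        parser = subparsers.add_parser(cls.name, help="Illustrative subcommand")
        parser.add_argument("--urls", nargs="*")
        # Binding execute here is what makes `args.func(**args.__dict__)` work later.
        parser.set_defaults(func=cls.execute)

    @classmethod
    def execute(cls, **kwargs):
        return f"{cls.name}: urls={kwargs.get('urls')}"


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--api-key", dest="api_key")
    parser.add_argument("--debug", default=False, action="store_true", dest="debug")
    subparsers = parser.add_subparsers(required=True, dest="command")
    for command in [ExampleCommand]:  # stand-in for youtool.commands.COMMANDS
        command.parse_arguments(subparsers)
    args = parser.parse_args()
    args.api_key = args.api_key or os.environ.get("YOUTUBE_API_KEY")
    if not args.api_key:
        parser.error("YouTube API Key is required")
    try:
        print(args.func(**args.__dict__))
    except Exception as error:
        if args.debug:
            raise
        parser.error(str(error))


if __name__ == "__main__":
    main()

Because execute() accepts **kwargs, passing the whole args.__dict__ (including command, debug and func itself) is harmless; each command simply picks the keys it needs.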
From bdd80489ca66ccac47ee9ff52b677418f4fbb649 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 27 Jun 2024 22:51:39 -0300 Subject: [PATCH 075/115] - Add updates --- youtool/commands/channel_id.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/youtool/commands/channel_id.py b/youtool/commands/channel_id.py index c599982..d42f311 100644 --- a/youtool/commands/channel_id.py +++ b/youtool/commands/channel_id.py @@ -21,7 +21,7 @@ class ChannelId(Command): CHANNEL_ID_COLUMN_NAME: str = "channel_id" @classmethod - def execute(cls, **kwargs) -> str: + def execute(cls, **kwargs) -> str: # noqa: D417 """Execute the channel-id command to fetch YouTube channel IDs from URLs and save them to a CSV file. This method retrieves YouTube channel IDs from a list of provided URLs or from a file containing URLs. From ebfadcf606844c6291985fe9dc63997526faec73 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Wed, 3 Jul 2024 21:42:53 -0300 Subject: [PATCH 076/115] Add updates --- youtool/commands/__init__.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/youtool/commands/__init__.py b/youtool/commands/__init__.py index 72913ce..be52c63 100644 --- a/youtool/commands/__init__.py +++ b/youtool/commands/__init__.py @@ -4,15 +4,20 @@ from .video_info import VideoInfo from .video_search import VideoSearch from .video_comments import VideoComments +from .video_livechat import VideoLiveChat +from .video_transcription import VideoTranscription COMMANDS = [ ChannelId, ChannelInfo, VideoInfo, VideoSearch, - VideoComments + VideoComments, + VideoLiveChat, + VideoTranscription ] __all__ = [ - "Command", "COMMANDS", "ChannelId", "ChannelInfo", "VideoInfo", "VideoSearch", "VideoComments" + "Command", "COMMANDS", "ChannelId", "ChannelInfo", "VideoInfo", "VideoSearch", "VideoComments", + "VideoLiveChat", "VideoTranscription" ] From 6a7542a0281a7e810f278d49d1a687fcc96375fb Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 4 Jul 2024 13:57:55 -0300 Subject: [PATCH 077/115] - Add test for channel_info command; - Add update channel_info file; - fix test_base --- tests/commands/test_base.py | 2 +- tests/commands/test_channel_info.py | 15 ++++----------- 2 files changed, 5 insertions(+), 12 deletions(-) diff --git a/tests/commands/test_base.py b/tests/commands/test_base.py index afbcf06..7cf87d3 100644 --- a/tests/commands/test_base.py +++ b/tests/commands/test_base.py @@ -111,7 +111,7 @@ def test_data_from_csv_column_not_found(mock_csv_file): file_path = Path("tests/resources/csv_column_not_found.csv") with pytest.raises(Exception) as exc_info: Command.data_from_csv(file_path, "NonExistentColumn") - assert f"Column NonExistentColumn not found on {file_path}" in str(exc_info.value) + assert "Column NonExistentColumn not found on tests/resources/csv_column_not_found.csv" in str(exc_info.value) @pytest.fixture diff --git a/tests/commands/test_channel_info.py b/tests/commands/test_channel_info.py index 3ae97c4..713e1ec 100644 --- a/tests/commands/test_channel_info.py +++ b/tests/commands/test_channel_info.py @@ -31,12 +31,7 @@ def test_filter_fields(): assert filtered_info == expected_result, f"Expected {expected_result}, but got {filtered_info}" -def test_channel_ids_from_urls_and_usernames(mocker, channels_urls): - """Test to verify fetching channel IDs from both URLs and usernames. - - This test checks if the `execute` method of the `ChannelInfo` class correctly fetches channel IDs - from a list of URLs and usernames, and then calls the `channels_infos` method with these IDs. 
- """ +def test_channel_ids_from_urls_and_usernames(mocker): urls = ["https://www.youtube.com/@Turicas/featured", "https://www.youtube.com/c/PythonicCaf%C3%A9"] usernames = ["Turicas", "PythonicCafe"] @@ -52,14 +47,12 @@ def test_channel_ids_from_urls_and_usernames(mocker, channels_urls): youtube_mock.return_value.channel_id_from_username = channel_id_from_username_mock youtube_mock.return_value.channels_infos = channels_infos_mock - ChannelInfo.execute(urls=channels_urls, usernames=usernames) + ChannelInfo.execute(urls=urls, usernames=usernames) channel_id_from_url_mock.assert_has_calls( - [call(url) for url in channels_urls] + [call(url) for url in urls] ) channel_id_from_username_mock.assert_has_calls( [call(username) for username in usernames] ) - channels_infos_mock.assert_called_once() - assert ids_from_usernames_mock in channels_infos_mock.call_args.args[0] - assert ids_from_urls_mock in channels_infos_mock.call_args.args[0] + channels_infos_mock.assert_called_once_with([ids_from_urls_mock, ids_from_usernames_mock]) From 7f7e633c410032cf915835c7302c6cea599a9bfe Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 4 Jul 2024 21:32:42 -0300 Subject: [PATCH 078/115] - Add test for video_search command; - Add updates to some test files; - created conftest file --- tests/commands/test_channel_info.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/tests/commands/test_channel_info.py b/tests/commands/test_channel_info.py index 713e1ec..bb18e95 100644 --- a/tests/commands/test_channel_info.py +++ b/tests/commands/test_channel_info.py @@ -31,10 +31,7 @@ def test_filter_fields(): assert filtered_info == expected_result, f"Expected {expected_result}, but got {filtered_info}" -def test_channel_ids_from_urls_and_usernames(mocker): - urls = ["https://www.youtube.com/@Turicas/featured", "https://www.youtube.com/c/PythonicCaf%C3%A9"] - usernames = ["Turicas", "PythonicCafe"] - +def test_channel_ids_from_urls_and_usernames(mocker, channels_urls, usernames): ids_from_urls_mock = "id_from_url" ids_from_usernames_mock = "id_from_username" youtube_mock = mocker.patch("youtool.commands.channel_info.YouTube") @@ -47,10 +44,10 @@ def test_channel_ids_from_urls_and_usernames(mocker): youtube_mock.return_value.channel_id_from_username = channel_id_from_username_mock youtube_mock.return_value.channels_infos = channels_infos_mock - ChannelInfo.execute(urls=urls, usernames=usernames) + ChannelInfo.execute(urls=channels_urls, usernames=usernames) channel_id_from_url_mock.assert_has_calls( - [call(url) for url in urls] + [call(url) for url in channels_urls] ) channel_id_from_username_mock.assert_has_calls( [call(username) for username in usernames] From 6ed06e7dda3b50fb35ce2477b13e25b13574fd46 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 01:00:03 -0300 Subject: [PATCH 079/115] - Add test for video_transcription command; - Add some necessary improvements in other files --- tests/commands/test_channel_info.py | 4 +++- youtool/commands/base.py | 6 +++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/tests/commands/test_channel_info.py b/tests/commands/test_channel_info.py index bb18e95..329680e 100644 --- a/tests/commands/test_channel_info.py +++ b/tests/commands/test_channel_info.py @@ -52,4 +52,6 @@ def test_channel_ids_from_urls_and_usernames(mocker, channels_urls, usernames): channel_id_from_username_mock.assert_has_calls( [call(username) for username in usernames] ) - channels_infos_mock.assert_called_once_with([ids_from_urls_mock, 
ids_from_usernames_mock]) + channels_infos_mock.assert_called_once() + assert ids_from_usernames_mock in channels_infos_mock.call_args.args[0] + assert ids_from_urls_mock in channels_infos_mock.call_args.args[0] diff --git a/youtool/commands/base.py b/youtool/commands/base.py index 28b6150..50068d6 100644 --- a/youtool/commands/base.py +++ b/youtool/commands/base.py @@ -80,7 +80,11 @@ def execute(cls, **kwargs) -> str: # noqa: D417 raise NotImplementedError() @staticmethod - def data_from_csv(file_path: Path, data_column_name: Optional[str] = None) -> List[str]: + def data_from_csv( + file_path: Path, + data_column_name: Optional[str] = None, + raise_column_exception: bool = True + ) -> List[str]: """Extracts a list of URLs from a specified CSV file. Args: From 4b5cdbe932abc0e09c37a1e73b44430abacf0668 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 02:46:32 -0300 Subject: [PATCH 080/115] Add cli config --- setup.py | 43 +++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 41 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 6068493..31ff255 100644 --- a/setup.py +++ b/setup.py @@ -1,3 +1,42 @@ -from setuptools import setup +from setuptools import setup, find_packages -setup() +setup( + name="youtool", + version="0.1.0", + packages=find_packages(), + install_requires=[ + 'youtool[cli]', + ], + extras_require={ + 'cli': [ + 'loguru', + 'tqdm' + ], + 'transcription': [ + 'yt-dlp' + ], + 'livechat': [ + 'chat-downloader' + ], + 'dev': [ + 'autoflake', + 'black', + 'flake8', + 'ipython', + 'isort', + 'pytest', + 'pytest-dependency', + 'twine', + 'wheel' + ], + 'base': [ + 'isodate', + 'requests' + ], + }, + entry_points={ + 'console_scripts': [ + 'youtool=youtool.cli:main', + ], + }, +) From 9a80dafb72fe584b91cc3d2b11d10679f8549f28 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 03:02:44 -0300 Subject: [PATCH 081/115] remove unnecessary files --- requirements/base.txt | 2 -- requirements/cli.txt | 2 -- requirements/dev.txt | 9 --------- requirements/livechat.txt | 1 - requirements/transcription.txt | 2 -- 5 files changed, 16 deletions(-) delete mode 100644 requirements/base.txt delete mode 100644 requirements/cli.txt delete mode 100644 requirements/dev.txt delete mode 100644 requirements/livechat.txt delete mode 100644 requirements/transcription.txt diff --git a/requirements/base.txt b/requirements/base.txt deleted file mode 100644 index ea93b32..0000000 --- a/requirements/base.txt +++ /dev/null @@ -1,2 +0,0 @@ -isodate -requests diff --git a/requirements/cli.txt b/requirements/cli.txt deleted file mode 100644 index 0ff8c5c..0000000 --- a/requirements/cli.txt +++ /dev/null @@ -1,2 +0,0 @@ -loguru -tqdm diff --git a/requirements/dev.txt b/requirements/dev.txt deleted file mode 100644 index 9a89a93..0000000 --- a/requirements/dev.txt +++ /dev/null @@ -1,9 +0,0 @@ -autoflake -black -flake8 -ipython -isort -pytest -pytest-dependency -twine -wheel diff --git a/requirements/livechat.txt b/requirements/livechat.txt deleted file mode 100644 index f035dbe..0000000 --- a/requirements/livechat.txt +++ /dev/null @@ -1 +0,0 @@ -chat-downloader diff --git a/requirements/transcription.txt b/requirements/transcription.txt deleted file mode 100644 index 47e3da8..0000000 --- a/requirements/transcription.txt +++ /dev/null @@ -1,2 +0,0 @@ -webvtt-py -yt-dlp From 065acf39f22a520aa4688cf4c35fac1545ebf35c Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Sun, 30 Jun 2024 19:26:03 -0300 Subject: [PATCH 082/115] Add fix in file --- youtool/cli.py | 29 
+++++++++-------------------- 1 file changed, 9 insertions(+), 20 deletions(-) diff --git a/youtool/cli.py b/youtool/cli.py index be0bbd0..70739c1 100644 --- a/youtool/cli.py +++ b/youtool/cli.py @@ -18,26 +18,15 @@ def main(): args = parser.parse_args() - if args.command == "channel-id": - print(f"Implement: {args.command}") # TODO: implement - - elif args.command == "channel-info": - print(f"Implement: {args.command}") # TODO: implement - - elif args.command == "video-info": - print(f"Implement: {args.command}") # TODO: implement - - elif args.command == "video-search": - print(f"Implement: {args.command}") # TODO: implement - - elif args.command == "video-comments": - print(f"Implement: {args.command}") # TODO: implement - - elif args.command == "video-livechat": - print(f"Implement: {args.command}") # TODO: implement - - elif args.command == "video-transcription": - print(f"Implement: {args.command}") # TODO: implement + if not args.api_key: + parser.error("YouTube API Key is required") + + try: + print(args.func(**args.__dict__)) + except Exception as error: + if args.debug: + raise error + parser.error(error) if __name__ == "__main__": From a6459238c65d6d1c6c3f5f8a78954ffb85e16290 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 27 Jun 2024 22:21:05 -0300 Subject: [PATCH 083/115] Add updates docstrings --- youtool/commands/base.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/youtool/commands/base.py b/youtool/commands/base.py index 50068d6..28b6150 100644 --- a/youtool/commands/base.py +++ b/youtool/commands/base.py @@ -80,11 +80,7 @@ def execute(cls, **kwargs) -> str: # noqa: D417 raise NotImplementedError() @staticmethod - def data_from_csv( - file_path: Path, - data_column_name: Optional[str] = None, - raise_column_exception: bool = True - ) -> List[str]: + def data_from_csv(file_path: Path, data_column_name: Optional[str] = None) -> List[str]: """Extracts a list of URLs from a specified CSV file. Args: From 96a9f056c5362c32ba6c84c431b856bcace60ed4 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 27 Jun 2024 22:22:26 -0300 Subject: [PATCH 084/115] Add updates docstrings --- youtool/commands/channel_id.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/youtool/commands/channel_id.py b/youtool/commands/channel_id.py index d42f311..c599982 100644 --- a/youtool/commands/channel_id.py +++ b/youtool/commands/channel_id.py @@ -21,7 +21,7 @@ class ChannelId(Command): CHANNEL_ID_COLUMN_NAME: str = "channel_id" @classmethod - def execute(cls, **kwargs) -> str: # noqa: D417 + def execute(cls, **kwargs) -> str: """Execute the channel-id command to fetch YouTube channel IDs from URLs and save them to a CSV file. This method retrieves YouTube channel IDs from a list of provided URLs or from a file containing URLs. From 533ec6a3d6afa61c485364ab2a12826a04a7280a Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 27 Jun 2024 22:51:39 -0300 Subject: [PATCH 085/115] - Add updates --- youtool/commands/channel_id.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/youtool/commands/channel_id.py b/youtool/commands/channel_id.py index c599982..d42f311 100644 --- a/youtool/commands/channel_id.py +++ b/youtool/commands/channel_id.py @@ -21,7 +21,7 @@ class ChannelId(Command): CHANNEL_ID_COLUMN_NAME: str = "channel_id" @classmethod - def execute(cls, **kwargs) -> str: + def execute(cls, **kwargs) -> str: # noqa: D417 """Execute the channel-id command to fetch YouTube channel IDs from URLs and save them to a CSV file. 
This method retrieves YouTube channel IDs from a list of provided URLs or from a file containing URLs. From d4b8986857d4c115abd5489bc9aa8c4e5928d2f0 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 27 Jun 2024 22:53:10 -0300 Subject: [PATCH 086/115] - Add updates --- youtool/cli.py | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/youtool/cli.py b/youtool/cli.py index 70739c1..362d1d3 100644 --- a/youtool/cli.py +++ b/youtool/cli.py @@ -2,12 +2,17 @@ def main(): +<<<<<<< HEAD parser = argparse.ArgumentParser() parser.add_argument("--api-key") subparsers = parser.add_subparsers(required=True, dest="command") +======= + """Main function for the YouTube CLI Tool. +>>>>>>> 7133ae0 (- Add updates) api_key = args.api_key or os.environ.get("YOUTUBE_API_KEY") +<<<<<<< HEAD cmd_channel_id = subparsers.add_parser("channel-id", help="Get channel IDs from a list of URLs (or CSV filename with URLs inside), generate CSV output (just the IDs)") cmd_channel_info = subparsers.add_parser("channel-info", help="Get channel info from a list of IDs (or CSV filename with IDs inside), generate CSV output (same schema for `channel` dicts)") cmd_video_info = subparsers.add_parser("video-info", help="Get video info from a list of IDs or URLs (or CSV filename with URLs/IDs inside), generate CSV output (same schema for `video` dicts)") @@ -15,6 +20,28 @@ def main(): cmd_video_comments = subparsers.add_parser("video-comments", help="Get comments from a video ID, generate CSV output (same schema for `comment` dicts)") cmd_video_livechat = subparsers.add_parser("video-livechat", help="Get comments from a video ID, generate CSV output (same schema for `chat_message` dicts)") cmd_video_transcriptions = subparsers.add_parser("video-transcription", help="Download video transcriptions based on language code, path and list of video IDs or URLs (or CSV filename with URLs/IDs inside), download files to destination and report results") +======= + Finally, the function executes the appropriate command based on the parsed arguments. If an exception occurs + during the execution of the command, it is caught and raised as an argparse error for proper handling. + + Raises: + argparse.ArgumentError: If the YouTube API key is not provided. + argparse.ArgumentError: If there is an error during the execution of the command. 
+ """ + parser = argparse.ArgumentParser(description="CLI Tool for managing YouTube videos add playlists") + parser.add_argument("--api-key", type=str, help="YouTube API Key", dest="api_key") + parser.add_argument("--debug", type=bool, help="Debug mode", dest="debug") + + subparsers = parser.add_subparsers(required=True, dest="command", title="Command", help="Command to be executed") + + # cmd_video_search = subparsers.add_parser("video-search", help="Get video info from a list of IDs or URLs (or CSV filename with URLs/IDs inside), generate CSV output (simplified `video` dict schema or option to get full video info after)") + # cmd_video_comments = subparsers.add_parser("video-comments", help="Get comments from a video ID, generate CSV output (same schema for `comment` dicts)") + # cmd_video_livechat = subparsers.add_parser("video-livechat", help="Get comments from a video ID, generate CSV output (same schema for `chat_message` dicts)") + # cmd_video_transcriptions = subparsers.add_parser("video-transcription", help="Download video transcriptions based on language code, path and list of video IDs or URLs (or CSV filename with URLs/IDs inside), download files to destination and report results") + + for command in COMMANDS: + command.parse_arguments(subparsers) +>>>>>>> 7133ae0 (- Add updates) args = parser.parse_args() From 8c77ab23993db29b3e52a0242c60b4aeba43ded6 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Tue, 2 Jul 2024 12:40:16 -0300 Subject: [PATCH 087/115] Add update --- tests/commands/test_base.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/commands/test_base.py b/tests/commands/test_base.py index 7cf87d3..61aba24 100644 --- a/tests/commands/test_base.py +++ b/tests/commands/test_base.py @@ -111,7 +111,11 @@ def test_data_from_csv_column_not_found(mock_csv_file): file_path = Path("tests/resources/csv_column_not_found.csv") with pytest.raises(Exception) as exc_info: Command.data_from_csv(file_path, "NonExistentColumn") +<<<<<<< HEAD assert "Column NonExistentColumn not found on tests/resources/csv_column_not_found.csv" in str(exc_info.value) +======= + assert f"Column NonExistentColumn not found on {file_path}" in str(exc_info.value) +>>>>>>> c347dd2 (Add update) @pytest.fixture From 264b31113cdd35eefb43bd37d64e413df8791e23 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Wed, 3 Jul 2024 15:28:21 -0300 Subject: [PATCH 088/115] Fix --- tests/commands/test_base.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/tests/commands/test_base.py b/tests/commands/test_base.py index 61aba24..7cf87d3 100644 --- a/tests/commands/test_base.py +++ b/tests/commands/test_base.py @@ -111,11 +111,7 @@ def test_data_from_csv_column_not_found(mock_csv_file): file_path = Path("tests/resources/csv_column_not_found.csv") with pytest.raises(Exception) as exc_info: Command.data_from_csv(file_path, "NonExistentColumn") -<<<<<<< HEAD assert "Column NonExistentColumn not found on tests/resources/csv_column_not_found.csv" in str(exc_info.value) -======= - assert f"Column NonExistentColumn not found on {file_path}" in str(exc_info.value) ->>>>>>> c347dd2 (Add update) @pytest.fixture From 7ed6840b4ea1ddff9dae6915899322211c059e3b Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 4 Jul 2024 13:57:55 -0300 Subject: [PATCH 089/115] - Add test for channel_info command; - Add update channel_info file; - fix test_base --- tests/commands/test_channel_info.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/tests/commands/test_channel_info.py 
b/tests/commands/test_channel_info.py index 329680e..713e1ec 100644 --- a/tests/commands/test_channel_info.py +++ b/tests/commands/test_channel_info.py @@ -31,7 +31,10 @@ def test_filter_fields(): assert filtered_info == expected_result, f"Expected {expected_result}, but got {filtered_info}" -def test_channel_ids_from_urls_and_usernames(mocker, channels_urls, usernames): +def test_channel_ids_from_urls_and_usernames(mocker): + urls = ["https://www.youtube.com/@Turicas/featured", "https://www.youtube.com/c/PythonicCaf%C3%A9"] + usernames = ["Turicas", "PythonicCafe"] + ids_from_urls_mock = "id_from_url" ids_from_usernames_mock = "id_from_username" youtube_mock = mocker.patch("youtool.commands.channel_info.YouTube") @@ -44,14 +47,12 @@ def test_channel_ids_from_urls_and_usernames(mocker, channels_urls, usernames): youtube_mock.return_value.channel_id_from_username = channel_id_from_username_mock youtube_mock.return_value.channels_infos = channels_infos_mock - ChannelInfo.execute(urls=channels_urls, usernames=usernames) + ChannelInfo.execute(urls=urls, usernames=usernames) channel_id_from_url_mock.assert_has_calls( - [call(url) for url in channels_urls] + [call(url) for url in urls] ) channel_id_from_username_mock.assert_has_calls( [call(username) for username in usernames] ) - channels_infos_mock.assert_called_once() - assert ids_from_usernames_mock in channels_infos_mock.call_args.args[0] - assert ids_from_urls_mock in channels_infos_mock.call_args.args[0] + channels_infos_mock.assert_called_once_with([ids_from_urls_mock, ids_from_usernames_mock]) From 1f30a9b10981380bf03e78bd9f96c9e1ffc11d0a Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 4 Jul 2024 21:32:42 -0300 Subject: [PATCH 090/115] - Add test for video_search command; - Add updates to some test files; - created conftest file --- tests/commands/test_channel_info.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/tests/commands/test_channel_info.py b/tests/commands/test_channel_info.py index 713e1ec..bb18e95 100644 --- a/tests/commands/test_channel_info.py +++ b/tests/commands/test_channel_info.py @@ -31,10 +31,7 @@ def test_filter_fields(): assert filtered_info == expected_result, f"Expected {expected_result}, but got {filtered_info}" -def test_channel_ids_from_urls_and_usernames(mocker): - urls = ["https://www.youtube.com/@Turicas/featured", "https://www.youtube.com/c/PythonicCaf%C3%A9"] - usernames = ["Turicas", "PythonicCafe"] - +def test_channel_ids_from_urls_and_usernames(mocker, channels_urls, usernames): ids_from_urls_mock = "id_from_url" ids_from_usernames_mock = "id_from_username" youtube_mock = mocker.patch("youtool.commands.channel_info.YouTube") @@ -47,10 +44,10 @@ def test_channel_ids_from_urls_and_usernames(mocker): youtube_mock.return_value.channel_id_from_username = channel_id_from_username_mock youtube_mock.return_value.channels_infos = channels_infos_mock - ChannelInfo.execute(urls=urls, usernames=usernames) + ChannelInfo.execute(urls=channels_urls, usernames=usernames) channel_id_from_url_mock.assert_has_calls( - [call(url) for url in urls] + [call(url) for url in channels_urls] ) channel_id_from_username_mock.assert_has_calls( [call(username) for username in usernames] From 088b3f082a63a42913312f694bc523ca72cf442d Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 01:00:03 -0300 Subject: [PATCH 091/115] - Add test for video_transcription command; - Add some necessary improvements in other files --- tests/commands/test_channel_info.py | 4 +++- 
youtool/commands/base.py | 6 +++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/tests/commands/test_channel_info.py b/tests/commands/test_channel_info.py index bb18e95..329680e 100644 --- a/tests/commands/test_channel_info.py +++ b/tests/commands/test_channel_info.py @@ -52,4 +52,6 @@ def test_channel_ids_from_urls_and_usernames(mocker, channels_urls, usernames): channel_id_from_username_mock.assert_has_calls( [call(username) for username in usernames] ) - channels_infos_mock.assert_called_once_with([ids_from_urls_mock, ids_from_usernames_mock]) + channels_infos_mock.assert_called_once() + assert ids_from_usernames_mock in channels_infos_mock.call_args.args[0] + assert ids_from_urls_mock in channels_infos_mock.call_args.args[0] diff --git a/youtool/commands/base.py b/youtool/commands/base.py index 28b6150..50068d6 100644 --- a/youtool/commands/base.py +++ b/youtool/commands/base.py @@ -80,7 +80,11 @@ def execute(cls, **kwargs) -> str: # noqa: D417 raise NotImplementedError() @staticmethod - def data_from_csv(file_path: Path, data_column_name: Optional[str] = None) -> List[str]: + def data_from_csv( + file_path: Path, + data_column_name: Optional[str] = None, + raise_column_exception: bool = True + ) -> List[str]: """Extracts a list of URLs from a specified CSV file. Args: From d00b97b3581d7425504dcd39e51b8864bd93c37f Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 19:10:20 -0300 Subject: [PATCH 092/115] add poetry --- poetry.lock | 1592 ++++++++++++++++++++++++++++++++++++++++++++++++ pyproject.toml | 33 + setup.py | 42 -- 3 files changed, 1625 insertions(+), 42 deletions(-) create mode 100644 poetry.lock create mode 100644 pyproject.toml delete mode 100644 setup.py diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..90f741d --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1592 @@ +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
+ +[[package]] +name = "asttokens" +version = "2.4.1" +description = "Annotate AST trees with source code positions" +optional = false +python-versions = "*" +files = [ + {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, + {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, +] + +[package.dependencies] +six = ">=1.12.0" + +[package.extras] +astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"] +test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] + +[[package]] +name = "autoflake" +version = "1.7.8" +description = "Removes unused imports and unused variables" +optional = false +python-versions = ">=3.7" +files = [ + {file = "autoflake-1.7.8-py3-none-any.whl", hash = "sha256:46373ef69b6714f5064c923bb28bd797c4f8a9497f557d87fc36665c6d956b39"}, + {file = "autoflake-1.7.8.tar.gz", hash = "sha256:e7e46372dee46fa1c97acf310d99d922b63d369718a270809d7c278d34a194cf"}, +] + +[package.dependencies] +pyflakes = ">=1.1.0,<3" + +[[package]] +name = "backports-tarfile" +version = "1.2.0" +description = "Backport of CPython tarfile module" +optional = false +python-versions = ">=3.8" +files = [ + {file = "backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34"}, + {file = "backports_tarfile-1.2.0.tar.gz", hash = "sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["jaraco.test", "pytest (!=8.0.*)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)"] + +[[package]] +name = "black" +version = "23.12.1" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, + {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, + {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, + {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, + {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, + {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, + {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, + {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, + {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, + {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, + {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, + {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, + {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, + {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, + {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, + {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, + {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, + {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, + {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, + {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, + {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, + {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt 
(>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "brotli" +version = "1.1.0" +description = "Python bindings for the Brotli compression library" +optional = false +python-versions = "*" +files = [ + {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e1140c64812cb9b06c922e77f1c26a75ec5e3f0fb2bf92cc8c58720dec276752"}, + {file = "Brotli-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8fd5270e906eef71d4a8d19b7c6a43760c6abcfcc10c9101d14eb2357418de9"}, + {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ae56aca0402a0f9a3431cddda62ad71666ca9d4dc3a10a142b9dce2e3c0cda3"}, + {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43ce1b9935bfa1ede40028054d7f48b5469cd02733a365eec8a329ffd342915d"}, + {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7c4855522edb2e6ae7fdb58e07c3ba9111e7621a8956f481c68d5d979c93032e"}, + {file = "Brotli-1.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:38025d9f30cf4634f8309c6874ef871b841eb3c347e90b0851f63d1ded5212da"}, + {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6a904cb26bfefc2f0a6f240bdf5233be78cd2488900a2f846f3c3ac8489ab80"}, + {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"}, + {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"}, + {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"}, + {file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"}, + {file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"}, + {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"}, + {file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c8146669223164fc87a7e3de9f81e9423c67a79d6b3447994dfb9c95da16e2d6"}, + {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30924eb4c57903d5a7526b08ef4a584acc22ab1ffa085faceb521521d2de32dd"}, + {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ceb64bbc6eac5a140ca649003756940f8d6a7c444a68af170b3187623b43bebf"}, + {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a469274ad18dc0e4d316eefa616d1d0c2ff9da369af19fa6f3daa4f09671fd61"}, + {file = "Brotli-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:524f35912131cc2cabb00edfd8d573b07f2d9f21fa824bd3fb19725a9cf06327"}, + {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5b3cc074004d968722f51e550b41a27be656ec48f8afaeeb45ebf65b561481dd"}, + {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"}, + {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"}, + {file = 
"Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"}, + {file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"}, + {file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"}, + {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"}, + {file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"}, + {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"}, + {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f4bf76817c14aa98cc6697ac02f3972cb8c3da93e9ef16b9c66573a68014f91"}, + {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0c5516f0aed654134a2fc936325cc2e642f8a0e096d075209672eb321cff408"}, + {file = "Brotli-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c3020404e0b5eefd7c9485ccf8393cfb75ec38ce75586e046573c9dc29967a0"}, + {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:4ed11165dd45ce798d99a136808a794a748d5dc38511303239d4e2363c0695dc"}, + {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"}, + {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"}, + {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"}, + {file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"}, + {file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"}, + {file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"}, + {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"}, + {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"}, + {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4d4a848d1837973bf0f4b5e54e3bec977d99be36a7895c61abb659301b02c112"}, + {file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fdc3ff3bfccdc6b9cc7c342c03aa2400683f0cb891d46e94b64a197910dc4064"}, + {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5eeb539606f18a0b232d4ba45adccde4125592f3f636a6182b4a8a436548b914"}, + {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"}, + {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"}, + {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", 
hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"}, + {file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"}, + {file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"}, + {file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"}, + {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f733d788519c7e3e71f0855c96618720f5d3d60c3cb829d8bbb722dddce37985"}, + {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:929811df5462e182b13920da56c6e0284af407d1de637d8e536c5cd00a7daf60"}, + {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0b63b949ff929fbc2d6d3ce0e924c9b93c9785d877a21a1b678877ffbbc4423a"}, + {file = "Brotli-1.1.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d192f0f30804e55db0d0e0a35d83a9fead0e9a359a9ed0285dbacea60cc10a84"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f296c40e23065d0d6650c4aefe7470d2a25fffda489bcc3eb66083f3ac9f6643"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"}, + {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"}, + {file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"}, + {file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"}, + {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"}, + {file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:03d20af184290887bdea3f0f78c4f737d126c74dc2f3ccadf07e54ceca3bf208"}, + {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6172447e1b368dcbc458925e5ddaf9113477b0ed542df258d84fa28fc45ceea7"}, + {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a743e5a28af5f70f9c080380a5f908d4d21d40e8f0e0c8901604d15cfa9ba751"}, + {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0541e747cce78e24ea12d69176f6a7ddb690e62c425e01d31cc065e69ce55b48"}, + {file = "Brotli-1.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cdbc1fc1bc0bff1cef838eafe581b55bfbffaed4ed0318b724d0b71d4d377619"}, + {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:890b5a14ce214389b2cc36ce82f3093f96f4cc730c1cffdbefff77a7c71f2a97"}, + {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"}, + {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"}, + {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"}, + {file = 
"Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"}, + {file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"}, + {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"}, + {file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7905193081db9bfa73b1219140b3d315831cbff0d8941f22da695832f0dd188f"}, + {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a77def80806c421b4b0af06f45d65a136e7ac0bdca3c09d9e2ea4e515367c7e9"}, + {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8dadd1314583ec0bf2d1379f7008ad627cd6336625d6679cf2f8e67081b83acf"}, + {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:901032ff242d479a0efa956d853d16875d42157f98951c0230f69e69f9c09bac"}, + {file = "Brotli-1.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:22fc2a8549ffe699bfba2256ab2ed0421a7b8fadff114a3d201794e45a9ff578"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ae15b066e5ad21366600ebec29a7ccbc86812ed267e4b28e860b8ca16a2bc474"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"}, + {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"}, + {file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"}, + {file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"}, + {file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"}, +] + +[[package]] +name = "brotlicffi" +version = "1.1.0.0" +description = "Python CFFI bindings to the Brotli library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "brotlicffi-1.1.0.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:9b7ae6bd1a3f0df532b6d67ff674099a96d22bc0948955cb338488c31bfb8851"}, + {file = "brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19ffc919fa4fc6ace69286e0a23b3789b4219058313cf9b45625016bf7ff996b"}, + {file = "brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9feb210d932ffe7798ee62e6145d3a757eb6233aa9a4e7db78dd3690d7755814"}, + {file = "brotlicffi-1.1.0.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84763dbdef5dd5c24b75597a77e1b30c66604725707565188ba54bab4f114820"}, + {file = "brotlicffi-1.1.0.0-cp37-abi3-win32.whl", hash = "sha256:1b12b50e07c3911e1efa3a8971543e7648100713d4e0971b13631cce22c587eb"}, + {file = "brotlicffi-1.1.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:994a4f0681bb6c6c3b0925530a1926b7a189d878e6e5e38fae8efa47c5d9c613"}, + {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2e4aeb0bd2540cb91b069dbdd54d458da8c4334ceaf2d25df2f4af576d6766ca"}, + {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:4b7b0033b0d37bb33009fb2fef73310e432e76f688af76c156b3594389d81391"}, + {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54a07bb2374a1eba8ebb52b6fafffa2afd3c4df85ddd38fcc0511f2bb387c2a8"}, + {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7901a7dc4b88f1c1475de59ae9be59799db1007b7d059817948d8e4f12e24e35"}, + {file = "brotlicffi-1.1.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce01c7316aebc7fce59da734286148b1d1b9455f89cf2c8a4dfce7d41db55c2d"}, + {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:246f1d1a90279bb6069de3de8d75a8856e073b8ff0b09dcca18ccc14cec85979"}, + {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc4bc5d82bc56ebd8b514fb8350cfac4627d6b0743382e46d033976a5f80fab6"}, + {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37c26ecb14386a44b118ce36e546ce307f4810bc9598a6e6cb4f7fca725ae7e6"}, + {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca72968ae4eaf6470498d5c2887073f7efe3b1e7d7ec8be11a06a79cc810e990"}, + {file = "brotlicffi-1.1.0.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:add0de5b9ad9e9aa293c3aa4e9deb2b61e99ad6c1634e01d01d98c03e6a354cc"}, + {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9b6068e0f3769992d6b622a1cd2e7835eae3cf8d9da123d7f51ca9c1e9c333e5"}, + {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8557a8559509b61e65083f8782329188a250102372576093c88930c875a69838"}, + {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a7ae37e5d79c5bdfb5b4b99f2715a6035e6c5bf538c3746abc8e26694f92f33"}, + {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391151ec86bb1c683835980f4816272a87eaddc46bb91cbf44f62228b84d8cca"}, + {file = "brotlicffi-1.1.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:2f3711be9290f0453de8eed5275d93d286abe26b08ab4a35d7452caa1fef532f"}, + {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1a807d760763e398bbf2c6394ae9da5815901aa93ee0a37bca5efe78d4ee3171"}, + {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa8ca0623b26c94fccc3a1fdd895be1743b838f3917300506d04aa3346fd2a14"}, + {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3de0cf28a53a3238b252aca9fed1593e9d36c1d116748013339f0949bfc84112"}, + {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6be5ec0e88a4925c91f3dea2bb0013b3a2accda6f77238f76a34a1ea532a1cb0"}, + {file = "brotlicffi-1.1.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d9eb71bb1085d996244439154387266fd23d6ad37161f6f52f1cd41dd95a3808"}, + {file = "brotlicffi-1.1.0.0.tar.gz", hash = "sha256:b77827a689905143f87915310b93b273ab17888fd43ef350d4832c4a71083c13"}, +] + +[package.dependencies] +cffi = ">=1.0.0" + +[[package]] +name = "certifi" +version = "2024.7.4" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, + {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, +] + +[[package]] +name = "cffi" +version = "1.16.0" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, + {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, + {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, + {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, + {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, + {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, + {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, + {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, + {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, + {file = 
"cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, + {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, + {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, + {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, + {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, + {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, + {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, + {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, + {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, + {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, + {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, + {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, + {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, + {file = 
"cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, + {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, + {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, + {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, + {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, + {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = 
"charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + 
{file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "chat-downloader" +version = "0.2.8" +description = "A simple tool used to retrieve chat messages from livestreams, videos, clips and past broadcasts. No authentication needed!" 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "chat-downloader-0.2.8.tar.gz", hash = "sha256:5816ac06179f81190e74c773d8afda82f4be718ea6dffdf7c22bbe265e1dd428"}, + {file = "chat_downloader-0.2.8-py2.py3-none-any.whl", hash = "sha256:2d9bbddc0a85371ba44814c3686ee6e5f70c0531d1f5ecc236eae5a5bbb90465"}, +] + +[package.dependencies] +colorlog = "*" +docstring-parser = "*" +isodate = "*" +requests = "*" +websocket-client = "*" + +[package.extras] +dev = ["coverage", "flake8", "pytest", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-programoutput", "tox", "twine", "wheel"] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "colorlog" +version = "6.8.2" +description = "Add colours to the output of Python's logging module." +optional = false +python-versions = ">=3.6" +files = [ + {file = "colorlog-6.8.2-py3-none-any.whl", hash = "sha256:4dcbb62368e2800cb3c5abd348da7e53f6c362dda502ec27c560b2e58a66bd33"}, + {file = "colorlog-6.8.2.tar.gz", hash = "sha256:3e3e079a41feb5a1b64f978b5ea4f46040a94f11f0e8bbb8261e3dbbeca64d44"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +development = ["black", "flake8", "mypy", "pytest", "types-colorama"] + +[[package]] +name = "cryptography" +version = "42.0.8" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:81d8a521705787afe7a18d5bfb47ea9d9cc068206270aad0b96a725022e18d2e"}, + {file = "cryptography-42.0.8-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:961e61cefdcb06e0c6d7e3a1b22ebe8b996eb2bf50614e89384be54c48c6b63d"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3ec3672626e1b9e55afd0df6d774ff0e953452886e06e0f1eb7eb0c832e8902"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e599b53fd95357d92304510fb7bda8523ed1f79ca98dce2f43c115950aa78801"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5226d5d21ab681f432a9c1cf8b658c0cb02533eece706b155e5fbd8a0cdd3949"}, + {file = "cryptography-42.0.8-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6b7c4f03ce01afd3b76cf69a5455caa9cfa3de8c8f493e0d3ab7d20611c8dae9"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2346b911eb349ab547076f47f2e035fc8ff2c02380a7cbbf8d87114fa0f1c583"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:ad803773e9df0b92e0a817d22fd8a3675493f690b96130a5e24f1b8fabbea9c7"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:2f66d9cd9147ee495a8374a45ca445819f8929a3efcd2e3df6428e46c3cbb10b"}, + {file = "cryptography-42.0.8-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d45b940883a03e19e944456a558b67a41160e367a719833c53de6911cabba2b7"}, + {file = "cryptography-42.0.8-cp37-abi3-win32.whl", hash = "sha256:a0c5b2b0585b6af82d7e385f55a8bc568abff8923af147ee3c07bd8b42cda8b2"}, + {file = "cryptography-42.0.8-cp37-abi3-win_amd64.whl", hash = "sha256:57080dee41209e556a9a4ce60d229244f7a66ef52750f813bfbe18959770cfba"}, + {file = "cryptography-42.0.8-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:dea567d1b0e8bc5764b9443858b673b734100c2871dc93163f58c46a97a83d28"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4783183f7cb757b73b2ae9aed6599b96338eb957233c58ca8f49a49cc32fd5e"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0608251135d0e03111152e41f0cc2392d1e74e35703960d4190b2e0f4ca9c70"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:dc0fdf6787f37b1c6b08e6dfc892d9d068b5bdb671198c72072828b80bd5fe4c"}, + {file = "cryptography-42.0.8-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:9c0c1716c8447ee7dbf08d6db2e5c41c688544c61074b54fc4564196f55c25a7"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:fff12c88a672ab9c9c1cf7b0c80e3ad9e2ebd9d828d955c126be4fd3e5578c9e"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:cafb92b2bc622cd1aa6a1dce4b93307792633f4c5fe1f46c6b97cf67073ec961"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:31f721658a29331f895a5a54e7e82075554ccfb8b163a18719d342f5ffe5ecb1"}, + {file = "cryptography-42.0.8-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b297f90c5723d04bcc8265fc2a0f86d4ea2e0f7ab4b6994459548d3a6b992a14"}, + {file = "cryptography-42.0.8-cp39-abi3-win32.whl", hash = "sha256:2f88d197e66c65be5e42cd72e5c18afbfae3f741742070e3019ac8f4ac57262c"}, + {file = "cryptography-42.0.8-cp39-abi3-win_amd64.whl", hash = 
"sha256:fa76fbb7596cc5839320000cdd5d0955313696d9511debab7ee7278fc8b5c84a"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ba4f0a211697362e89ad822e667d8d340b4d8d55fae72cdd619389fb5912eefe"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:81884c4d096c272f00aeb1f11cf62ccd39763581645b0812e99a91505fa48e0c"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c9bb2ae11bfbab395bdd072985abde58ea9860ed84e59dbc0463a5d0159f5b71"}, + {file = "cryptography-42.0.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7016f837e15b0a1c119d27ecd89b3515f01f90a8615ed5e9427e30d9cdbfed3d"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5a94eccb2a81a309806027e1670a358b99b8fe8bfe9f8d329f27d72c094dde8c"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dec9b018df185f08483f294cae6ccac29e7a6e0678996587363dc352dc65c842"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:343728aac38decfdeecf55ecab3264b015be68fc2816ca800db649607aeee648"}, + {file = "cryptography-42.0.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:013629ae70b40af70c9a7a5db40abe5d9054e6f4380e50ce769947b73bf3caad"}, + {file = "cryptography-42.0.8.tar.gz", hash = "sha256:8d09d05439ce7baa8e9e95b07ec5b6c886f548deb7e0f69ef25f64b3bce842f2"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +optional = false +python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + +[[package]] +name = "docstring-parser" +version = "0.16" +description = "Parse Python docstrings in reST, Google and Numpydoc format" +optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637"}, + {file = "docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e"}, +] + +[[package]] +name = "docutils" +version = "0.21.2" +description = "Docutils -- Python Documentation Utilities" +optional = false +python-versions = ">=3.9" +files = [ + {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, + {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, +] + +[[package]] +name = "executing" +version = "2.0.1" +description = "Get the currently executing AST node of a frame, and other information" +optional = false +python-versions = ">=3.5" +files = [ + {file = "executing-2.0.1-py2.py3-none-any.whl", hash = 
"sha256:eac49ca94516ccc753f9fb5ce82603156e590b27525a8bc32cce8ae302eb61bc"}, + {file = "executing-2.0.1.tar.gz", hash = "sha256:35afe2ce3affba8ee97f2d69927fa823b08b472b7b994e36a52a964b93d16147"}, +] + +[package.extras] +tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] + +[[package]] +name = "flake8" +version = "5.0.4" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, + {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, +] + +[package.dependencies] +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.9.0,<2.10.0" +pyflakes = ">=2.5.0,<2.6.0" + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "importlib-metadata" +version = "8.0.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, + {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "ipython" +version = "8.26.0" +description = "IPython: Productive Interactive Computing" +optional = false +python-versions = ">=3.10" +files = [ + {file = "ipython-8.26.0-py3-none-any.whl", hash = "sha256:e6b347c27bdf9c32ee9d31ae85defc525755a1869f14057e900675b9e8d6e6ff"}, + {file = "ipython-8.26.0.tar.gz", hash = "sha256:1cec0fbba8404af13facebe83d04436a7434c7400e59f47acf467c64abd0956c"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""} +prompt-toolkit = ">=3.0.41,<3.1.0" +pygments = ">=2.4.0" +stack-data = "*" +traitlets = ">=5.13.0" +typing-extensions = {version = ">=4.6", markers = "python_version < \"3.12\""} + +[package.extras] +all = 
["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"] +black = ["black"] +doc = ["docrepr", "exceptiongroup", "intersphinx-registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing-extensions"] +kernel = ["ipykernel"] +matplotlib = ["matplotlib"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"] +test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"] + +[[package]] +name = "isodate" +version = "0.6.1" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = "*" +files = [ + {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, + {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "jaraco-classes" +version = "3.4.0" +description = "Utility functions for Python class constructs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jaraco.classes-3.4.0-py3-none-any.whl", hash = "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790"}, + {file = "jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd"}, +] + +[package.dependencies] +more-itertools = "*" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "jaraco-context" +version = "5.3.0" +description = "Useful decorators and context managers" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jaraco.context-5.3.0-py3-none-any.whl", hash = "sha256:3e16388f7da43d384a1a7cd3452e72e14732ac9fe459678773a3608a812bf266"}, + {file = "jaraco.context-5.3.0.tar.gz", hash = "sha256:c2f67165ce1f9be20f32f650f25d8edfc1646a8aeee48ae06fb35f90763576d2"}, +] + +[package.dependencies] +"backports.tarfile" = {version = "*", markers = "python_version < \"3.12\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["portend", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "jaraco-functools" +version = "4.0.1" +description = "Functools like those found in stdlib" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jaraco.functools-4.0.1-py3-none-any.whl", hash = 
"sha256:3b24ccb921d6b593bdceb56ce14799204f473976e2a9d4b15b04d0f2c2326664"}, + {file = "jaraco_functools-4.0.1.tar.gz", hash = "sha256:d33fa765374c0611b52f8b3a795f8900869aa88c84769d4d1746cd68fb28c3e8"}, +] + +[package.dependencies] +more-itertools = "*" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["jaraco.classes", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "jedi" +version = "0.19.1" +description = "An autocompletion tool for Python that can be used for text editors." +optional = false +python-versions = ">=3.6" +files = [ + {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"}, + {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"}, +] + +[package.dependencies] +parso = ">=0.8.3,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] + +[[package]] +name = "jeepney" +version = "0.8.0" +description = "Low-level, pure Python DBus protocol wrapper." +optional = false +python-versions = ">=3.7" +files = [ + {file = "jeepney-0.8.0-py3-none-any.whl", hash = "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755"}, + {file = "jeepney-0.8.0.tar.gz", hash = "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806"}, +] + +[package.extras] +test = ["async-timeout", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"] +trio = ["async_generator", "trio"] + +[[package]] +name = "keyring" +version = "25.2.1" +description = "Store and access your passwords safely." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "keyring-25.2.1-py3-none-any.whl", hash = "sha256:2458681cdefc0dbc0b7eb6cf75d0b98e59f9ad9b2d4edd319d18f68bdca95e50"}, + {file = "keyring-25.2.1.tar.gz", hash = "sha256:daaffd42dbda25ddafb1ad5fec4024e5bbcfe424597ca1ca452b299861e49f1b"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.11.4", markers = "python_version < \"3.12\""} +"jaraco.classes" = "*" +"jaraco.context" = "*" +"jaraco.functools" = "*" +jeepney = {version = ">=0.4.2", markers = "sys_platform == \"linux\""} +pywin32-ctypes = {version = ">=0.2.0", markers = "sys_platform == \"win32\""} +SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""} + +[package.extras] +completion = ["shtab (>=1.1.0)"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "loguru" +version = "0.6.0" +description = "Python logging made (stupidly) simple" +optional = false +python-versions = ">=3.5" +files = [ + {file = "loguru-0.6.0-py3-none-any.whl", hash = "sha256:4e2414d534a2ab57573365b3e6d0234dfb1d84b68b7f3b948e6fb743860a77c3"}, + {file = "loguru-0.6.0.tar.gz", hash = "sha256:066bd06758d0a513e9836fd9c6b5a75bfb3fd36841f4b996bc60b547a309d41c"}, +] + +[package.dependencies] +colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} +win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} + +[package.extras] +dev = ["Sphinx (>=4.1.1)", "black (>=19.10b0)", "colorama (>=0.3.4)", "docutils (==0.16)", "flake8 (>=3.7.7)", "isort (>=5.1.1)", "pytest (>=4.6.2)", "pytest-cov (>=2.7.1)", "sphinx-autobuild (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "tox (>=3.9.0)"] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "matplotlib-inline" +version = "0.1.7" +description = "Inline Matplotlib backend for Jupyter" +optional = false +python-versions = ">=3.8" +files = [ + {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, + {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, +] + +[package.dependencies] +traitlets = "*" + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "more-itertools" +version = "10.3.0" +description = "More routines for operating on iterables, beyond itertools" +optional = false +python-versions = ">=3.8" +files = [ + {file = "more-itertools-10.3.0.tar.gz", hash = "sha256:e5d93ef411224fbcef366a6e8ddc4c5781bc6359d43412a65dd5964e46111463"}, + {file = "more_itertools-10.3.0-py3-none-any.whl", hash = "sha256:ea6a02e24a9161e51faad17a8782b92a0df82c12c1c8886fec7f0c3fa1a1b320"}, +] + +[[package]] +name = "mutagen" +version = "1.47.0" +description = "read and write audio tags for many formats" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mutagen-1.47.0-py3-none-any.whl", hash = "sha256:edd96f50c5907a9539d8e5bba7245f62c9f520aef333d13392a79a4f70aca719"}, + {file = "mutagen-1.47.0.tar.gz", hash = "sha256:719fadef0a978c31b4cf3c956261b3c58b6948b32023078a2117b1de09f0fc99"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nh3" +version = "0.2.17" +description = "Python bindings to the ammonia HTML sanitization library." +optional = false +python-versions = "*" +files = [ + {file = "nh3-0.2.17-cp37-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:551672fd71d06cd828e282abdb810d1be24e1abb7ae2543a8fa36a71c1006fe9"}, + {file = "nh3-0.2.17-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:c551eb2a3876e8ff2ac63dff1585236ed5dfec5ffd82216a7a174f7c5082a78a"}, + {file = "nh3-0.2.17-cp37-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:66f17d78826096291bd264f260213d2b3905e3c7fae6dfc5337d49429f1dc9f3"}, + {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0316c25b76289cf23be6b66c77d3608a4fdf537b35426280032f432f14291b9a"}, + {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:22c26e20acbb253a5bdd33d432a326d18508a910e4dcf9a3316179860d53345a"}, + {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:85cdbcca8ef10733bd31f931956f7fbb85145a4d11ab9e6742bbf44d88b7e351"}, + {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40015514022af31975c0b3bca4014634fa13cb5dc4dbcbc00570acc781316dcc"}, + {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ba73a2f8d3a1b966e9cdba7b211779ad8a2561d2dba9674b8a19ed817923f65f"}, + {file = "nh3-0.2.17-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c21bac1a7245cbd88c0b0e4a420221b7bfa838a2814ee5bb924e9c2f10a1120b"}, + {file = "nh3-0.2.17-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d7a25fd8c86657f5d9d576268e3b3767c5cd4f42867c9383618be8517f0f022a"}, + {file = "nh3-0.2.17-cp37-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:c790769152308421283679a142dbdb3d1c46c79c823008ecea8e8141db1a2062"}, + {file = "nh3-0.2.17-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:b4427ef0d2dfdec10b641ed0bdaf17957eb625b2ec0ea9329b3d28806c153d71"}, + {file = "nh3-0.2.17-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a3f55fabe29164ba6026b5ad5c3151c314d136fd67415a17660b4aaddacf1b10"}, + {file = "nh3-0.2.17-cp37-abi3-win32.whl", hash = "sha256:1a814dd7bba1cb0aba5bcb9bebcc88fd801b63e21e2450ae6c52d3b3336bc911"}, + {file = "nh3-0.2.17-cp37-abi3-win_amd64.whl", hash = "sha256:1aa52a7def528297f256de0844e8dd680ee279e79583c76d6fa73a978186ddfb"}, + {file = "nh3-0.2.17.tar.gz", hash = "sha256:40d0741a19c3d645e54efba71cb0d8c475b59135c1e3c580f879ad5514cbf028"}, +] + +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "parso" +version = "0.8.4" +description = "A Python Parser" +optional = false +python-versions = ">=3.6" +files = [ + {file = "parso-0.8.4-py2.py3-none-any.whl", hash = 
"sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, + {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, +] + +[package.extras] +qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] +testing = ["docopt", "pytest"] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "pexpect" +version = "4.9.0" +description = "Pexpect allows easy control of interactive console applications." +optional = false +python-versions = "*" +files = [ + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, +] + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pkginfo" +version = "1.11.1" +description = "Query metadata from sdists / bdists / installed packages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pkginfo-1.11.1-py3-none-any.whl", hash = "sha256:bfa76a714fdfc18a045fcd684dbfc3816b603d9d075febef17cb6582bea29573"}, + {file = "pkginfo-1.11.1.tar.gz", hash = "sha256:2e0dca1cf4c8e39644eed32408ea9966ee15e0d324c62ba899a393b3c6b467aa"}, +] + +[package.extras] +testing = ["pytest", "pytest-cov", "wheel"] + +[[package]] +name = "platformdirs" +version = "4.2.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, + {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] +type = ["mypy (>=1.8)"] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "prompt-toolkit" +version = "3.0.47" +description = "Library for building powerful interactive command lines in Python" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"}, + {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +optional = false +python-versions = "*" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pure-eval" +version = "0.2.2" +description = "Safely evaluate AST nodes without side effects" +optional = false +python-versions = "*" +files = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] + +[package.extras] +tests = ["pytest"] + +[[package]] +name = "pycodestyle" +version = "2.9.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, + {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, +] + +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pycryptodomex" +version = "3.20.0" +description = "Cryptographic library for Python" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = 
"pycryptodomex-3.20.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:645bd4ca6f543685d643dadf6a856cc382b654cc923460e3a10a49c1b3832aeb"}, + {file = "pycryptodomex-3.20.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ff5c9a67f8a4fba4aed887216e32cbc48f2a6fb2673bb10a99e43be463e15913"}, + {file = "pycryptodomex-3.20.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:8ee606964553c1a0bc74057dd8782a37d1c2bc0f01b83193b6f8bb14523b877b"}, + {file = "pycryptodomex-3.20.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7805830e0c56d88f4d491fa5ac640dfc894c5ec570d1ece6ed1546e9df2e98d6"}, + {file = "pycryptodomex-3.20.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:bc3ee1b4d97081260d92ae813a83de4d2653206967c4a0a017580f8b9548ddbc"}, + {file = "pycryptodomex-3.20.0-cp27-cp27m-win32.whl", hash = "sha256:8af1a451ff9e123d0d8bd5d5e60f8e3315c3a64f3cdd6bc853e26090e195cdc8"}, + {file = "pycryptodomex-3.20.0-cp27-cp27m-win_amd64.whl", hash = "sha256:cbe71b6712429650e3883dc81286edb94c328ffcd24849accac0a4dbcc76958a"}, + {file = "pycryptodomex-3.20.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:76bd15bb65c14900d98835fcd10f59e5e0435077431d3a394b60b15864fddd64"}, + {file = "pycryptodomex-3.20.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:653b29b0819605fe0898829c8ad6400a6ccde096146730c2da54eede9b7b8baa"}, + {file = "pycryptodomex-3.20.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62a5ec91388984909bb5398ea49ee61b68ecb579123694bffa172c3b0a107079"}, + {file = "pycryptodomex-3.20.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:108e5f1c1cd70ffce0b68739c75734437c919d2eaec8e85bffc2c8b4d2794305"}, + {file = "pycryptodomex-3.20.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:59af01efb011b0e8b686ba7758d59cf4a8263f9ad35911bfe3f416cee4f5c08c"}, + {file = "pycryptodomex-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:82ee7696ed8eb9a82c7037f32ba9b7c59e51dda6f105b39f043b6ef293989cb3"}, + {file = "pycryptodomex-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91852d4480a4537d169c29a9d104dda44094c78f1f5b67bca76c29a91042b623"}, + {file = "pycryptodomex-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca649483d5ed251d06daf25957f802e44e6bb6df2e8f218ae71968ff8f8edc4"}, + {file = "pycryptodomex-3.20.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e186342cfcc3aafaad565cbd496060e5a614b441cacc3995ef0091115c1f6c5"}, + {file = "pycryptodomex-3.20.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:25cd61e846aaab76d5791d006497134602a9e451e954833018161befc3b5b9ed"}, + {file = "pycryptodomex-3.20.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:9c682436c359b5ada67e882fec34689726a09c461efd75b6ea77b2403d5665b7"}, + {file = "pycryptodomex-3.20.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7a7a8f33a1f1fb762ede6cc9cbab8f2a9ba13b196bfaf7bc6f0b39d2ba315a43"}, + {file = "pycryptodomex-3.20.0-cp35-abi3-win32.whl", hash = "sha256:c39778fd0548d78917b61f03c1fa8bfda6cfcf98c767decf360945fe6f97461e"}, + {file = "pycryptodomex-3.20.0-cp35-abi3-win_amd64.whl", hash = "sha256:2a47bcc478741b71273b917232f521fd5704ab4b25d301669879e7273d3586cc"}, + {file = "pycryptodomex-3.20.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:1be97461c439a6af4fe1cf8bf6ca5936d3db252737d2f379cc6b2e394e12a458"}, + {file = "pycryptodomex-3.20.0-pp27-pypy_73-win32.whl", hash = 
"sha256:19764605feea0df966445d46533729b645033f134baeb3ea26ad518c9fdf212c"}, + {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f2e497413560e03421484189a6b65e33fe800d3bd75590e6d78d4dfdb7accf3b"}, + {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e48217c7901edd95f9f097feaa0388da215ed14ce2ece803d3f300b4e694abea"}, + {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d00fe8596e1cc46b44bf3907354e9377aa030ec4cd04afbbf6e899fc1e2a7781"}, + {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:88afd7a3af7ddddd42c2deda43d53d3dfc016c11327d0915f90ca34ebda91499"}, + {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d3584623e68a5064a04748fb6d76117a21a7cb5eaba20608a41c7d0c61721794"}, + {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0daad007b685db36d977f9de73f61f8da2a7104e20aca3effd30752fd56f73e1"}, + {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dcac11031a71348faaed1f403a0debd56bf5404232284cf8c761ff918886ebc"}, + {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:69138068268127cd605e03438312d8f271135a33140e2742b417d027a0539427"}, + {file = "pycryptodomex-3.20.0.tar.gz", hash = "sha256:7a710b79baddd65b806402e14766c721aee8fb83381769c27920f26476276c1e"}, +] + +[[package]] +name = "pyflakes" +version = "2.5.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, + {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, +] + +[[package]] +name = "pygments" +version = "2.18.0" +description = "Pygments is a syntax highlighting package written in Python." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-dependency" +version = "0.5.1" +description = "Manage dependencies of tests" +optional = false +python-versions = "*" +files = [ + {file = "pytest-dependency-0.5.1.tar.gz", hash = "sha256:c2a892906192663f85030a6ab91304e508e546cddfe557d692d61ec57a1d946b"}, +] + +[package.dependencies] +pytest = ">=3.6.0" + +[[package]] +name = "pywin32-ctypes" +version = "0.2.2" +description = "A (partial) reimplementation of pywin32 using ctypes/cffi" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pywin32-ctypes-0.2.2.tar.gz", hash = "sha256:3426e063bdd5fd4df74a14fa3cf80a0b42845a87e1d1e81f6549f9daec593a60"}, + {file = "pywin32_ctypes-0.2.2-py3-none-any.whl", hash = "sha256:bf490a1a709baf35d688fe0ecf980ed4de11d2b3e37b51e5442587a75d9957e7"}, +] + +[[package]] +name = "readme-renderer" +version = "43.0" +description = "readme_renderer is a library for rendering readme descriptions for Warehouse" +optional = false +python-versions = ">=3.8" +files = [ + {file = "readme_renderer-43.0-py3-none-any.whl", hash = "sha256:19db308d86ecd60e5affa3b2a98f017af384678c63c88e5d4556a380e674f3f9"}, + {file = "readme_renderer-43.0.tar.gz", hash = "sha256:1818dd28140813509eeed8d62687f7cd4f7bad90d4db586001c5dc09d4fde311"}, +] + +[package.dependencies] +docutils = ">=0.13.1" +nh3 = ">=0.2.14" +Pygments = ">=2.5.1" + +[package.extras] +md = ["cmarkgfm (>=0.8.0)"] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-toolbelt" +version = "1.0.0" +description = "A utility belt for advanced users of python-requests" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, + {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, +] + +[package.dependencies] +requests = ">=2.0.1,<3.0.0" + +[[package]] +name = "rfc3986" +version = "2.0.0" +description = "Validating URI References per RFC 3986" +optional = false +python-versions = ">=3.7" +files = [ + {file = "rfc3986-2.0.0-py2.py3-none-any.whl", hash = "sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd"}, + {file = "rfc3986-2.0.0.tar.gz", hash = "sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c"}, +] + +[package.extras] +idna2008 = ["idna"] + +[[package]] +name = "rich" +version = "13.7.1" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, + {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + +[[package]] +name = "secretstorage" +version = "3.3.3" +description = "Python bindings to FreeDesktop.org Secret Service API" +optional = false +python-versions = ">=3.6" +files = [ + {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"}, + {file = "SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"}, +] + +[package.dependencies] +cryptography = ">=2.0" +jeepney = ">=0.6" + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "stack-data" +version = "0.6.3" +description = "Extract data from python stack frames and tracebacks for informative displays" +optional = false +python-versions = "*" +files = [ + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] + 
+[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + +[[package]] +name = "tqdm" +version = "4.66.4" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"}, + {file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "traitlets" +version = "5.14.3" +description = "Traitlets Python configuration system" +optional = false +python-versions = ">=3.8" +files = [ + {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, + {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"] + +[[package]] +name = "twine" +version = "4.0.2" +description = "Collection of utilities for publishing packages on PyPI" +optional = false +python-versions = ">=3.7" +files = [ + {file = "twine-4.0.2-py3-none-any.whl", hash = "sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8"}, + {file = "twine-4.0.2.tar.gz", hash = "sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8"}, +] + +[package.dependencies] +importlib-metadata = ">=3.6" +keyring = ">=15.1" +pkginfo = ">=1.8.1" +readme-renderer = ">=35.0" +requests = ">=2.20" +requests-toolbelt = ">=0.8.0,<0.9.0 || >0.9.0" +rfc3986 = ">=1.4.0" +rich = ">=12.0.0" +urllib3 = ">=1.26.0" + +[[package]] +name = "typing-extensions" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "urllib3" +version = "2.2.2" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wcwidth" +version = "0.2.13" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + +[[package]] +name = "websocket-client" +version = "1.8.0" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.8" +files = [ + {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, + {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, +] + +[package.extras] +docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[[package]] +name = "websockets" +version = "12.0" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"}, + {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"}, + {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"}, + {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"}, + {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"}, + {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"}, + {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"}, + {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"}, + {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"}, + {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"}, + {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"}, + {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"}, + {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"}, + {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"}, + {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"}, + {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"}, + {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"}, + {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, + {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, + {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, + {file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"}, + {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"}, + {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"}, + {file = "websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"}, + {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"}, + {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"}, + {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"}, + {file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"}, + {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"}, + {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"}, + {file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"}, + {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"}, + {file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"}, + {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"}, + {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"}, + {file = 
"websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, + {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, + {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, + {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, + {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, + {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, + {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, + {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, + {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, + {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, + {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, +] + +[[package]] +name = "wheel" +version = "0.37.1" +description = "A built-package format for Python" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +files = [ + {file = "wheel-0.37.1-py2.py3-none-any.whl", hash = "sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a"}, + {file = "wheel-0.37.1.tar.gz", hash = "sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4"}, +] + +[package.extras] +test = ["pytest (>=3.0.0)", "pytest-cov"] + +[[package]] +name = "win32-setctime" +version = "1.1.0" +description = "A small Python utility to set file creation time on Windows" +optional = false +python-versions = ">=3.5" +files = [ + {file = "win32_setctime-1.1.0-py3-none-any.whl", hash = 
"sha256:231db239e959c2fe7eb1d7dc129f11172354f98361c4fa2d6d2d7e278baa8aad"}, + {file = "win32_setctime-1.1.0.tar.gz", hash = "sha256:15cf5750465118d6929ae4de4eb46e8edae9a5634350c01ba582df868e932cb2"}, +] + +[package.extras] +dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] + +[[package]] +name = "yt-dlp" +version = "2023.12.30" +description = "A youtube-dl fork with additional features and patches" +optional = false +python-versions = ">=3.8" +files = [ + {file = "yt-dlp-2023.12.30.tar.gz", hash = "sha256:a11862e57721b0a0f0883dfeb5a4d79ba213a2d4c45e1880e9fd70f8e6570c38"}, + {file = "yt_dlp-2023.12.30-py2.py3-none-any.whl", hash = "sha256:c00d9a71d64472ad441bcaa1ec0c3797d6e60c9f934f270096a96fe51657e7b3"}, +] + +[package.dependencies] +brotli = {version = "*", markers = "implementation_name == \"cpython\""} +brotlicffi = {version = "*", markers = "implementation_name != \"cpython\""} +certifi = "*" +mutagen = "*" +pycryptodomex = "*" +requests = ">=2.31.0,<3" +urllib3 = ">=1.26.17,<3" +websockets = ">=12.0" + +[[package]] +name = "zipp" +version = "3.19.2" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, + {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, +] + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.11" +content-hash = "eacd967a1ad7943e647ca8ed9ad1a0f9cb4d2d22c7ab51529b166fd9bc568fe5" diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..dd33ca4 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,33 @@ +[tool.poetry] +name = "youtool" +version = "0.1.1" +description = "Easy-to-use library to access YouTube Data API v3 in bulk operations" +authors = ["Álvaro Justen "] +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.11" +loguru = "^0.6.0" +tqdm = "^4.64.0" +yt-dlp = "^2023.4.1" +chat-downloader = "0.2.8" +isodate = "^0.6.1" +requests = "^2.28.1" + +[tool.poetry.dev-dependencies] +autoflake = "^1.4" +black = "^23.3.0" +flake8 = "^5.0.4" +ipython = "^8.4.0" +isort = "^5.10.1" +pytest = "^7.1.2" +pytest-dependency = "^0.5.1" +twine = "^4.0.1" +wheel = "^0.37.1" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + +[tool.poetry.scripts] +youtool = "youtool.cli:main" diff --git a/setup.py b/setup.py deleted file mode 100644 index 31ff255..0000000 --- a/setup.py +++ /dev/null @@ -1,42 +0,0 @@ -from setuptools import setup, find_packages - -setup( - name="youtool", - version="0.1.0", - packages=find_packages(), - install_requires=[ - 'youtool[cli]', - ], - extras_require={ - 'cli': [ - 'loguru', - 'tqdm' - ], - 'transcription': [ - 'yt-dlp' - ], - 'livechat': [ - 'chat-downloader' - ], - 'dev': [ - 'autoflake', - 'black', - 'flake8', - 'ipython', - 'isort', - 'pytest', - 'pytest-dependency', - 'twine', - 'wheel' - ], - 'base': [ - 'isodate', - 'requests' - ], - }, - entry_points={ - 'console_scripts': [ - 'youtool=youtool.cli:main', - ], 
- }, -) From 0644cb3563eef47429bce0d85bd22b46e303de97 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 22:24:57 -0300 Subject: [PATCH 093/115] Update Poetry configuration --- .dockerignore | 1 - Dockerfile | 16 +-- poetry.lock | 259 ++++++++++++++++++++++++++++++++++--------------- pyproject.toml | 30 +++--- 4 files changed, 205 insertions(+), 101 deletions(-) diff --git a/.dockerignore b/.dockerignore index 2671039..2579097 100644 --- a/.dockerignore +++ b/.dockerignore @@ -13,7 +13,6 @@ .gitignore .pytest_cache Dockerfile -README.md build data dist diff --git a/Dockerfile b/Dockerfile index 9b88ea5..2cf2f89 100644 --- a/Dockerfile +++ b/Dockerfile @@ -11,12 +11,12 @@ RUN apt update \ && apt clean \ && rm -rf /var/lib/apt/lists/* -COPY requirements/ /app/requirements -RUN pip install --no-cache-dir -U pip \ - && pip install --no-cache-dir -r /app/requirements/base.txt \ - && pip install --no-cache-dir -r /app/requirements/cli.txt \ - && pip install --no-cache-dir -r /app/requirements/livechat.txt \ - && pip install --no-cache-dir -r /app/requirements/transcription.txt \ - && if [ "$DEV_BUILD" = "true" ]; then pip install --no-cache-dir -r /app/requirements/dev.txt; fi - COPY . /app/ + +RUN pip install --no-cache-dir -U pip \ + && if [ "$DEV_BUILD" = "true" ]; \ + then \ + pip install poetry==1.4.2; \ + poetry export -f requirements.txt --output requirements.txt --without-hashes --with dev; \ + pip install --no-cache-dir -r requirements.txt; \ + else pip install /app; fi diff --git a/poetry.lock b/poetry.lock index 90f741d..b00b8c9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,9 +1,10 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. [[package]] name = "asttokens" version = "2.4.1" description = "Annotate AST trees with source code positions" +category = "dev" optional = false python-versions = "*" files = [ @@ -20,22 +21,24 @@ test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] [[package]] name = "autoflake" -version = "1.7.8" +version = "2.3.1" description = "Removes unused imports and unused variables" +category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "autoflake-1.7.8-py3-none-any.whl", hash = "sha256:46373ef69b6714f5064c923bb28bd797c4f8a9497f557d87fc36665c6d956b39"}, - {file = "autoflake-1.7.8.tar.gz", hash = "sha256:e7e46372dee46fa1c97acf310d99d922b63d369718a270809d7c278d34a194cf"}, + {file = "autoflake-2.3.1-py3-none-any.whl", hash = "sha256:3ae7495db9084b7b32818b4140e6dc4fc280b712fb414f5b8fe57b0a8e85a840"}, + {file = "autoflake-2.3.1.tar.gz", hash = "sha256:c98b75dc5b0a86459c4f01a1d32ac7eb4338ec4317a4469515ff1e687ecd909e"}, ] [package.dependencies] -pyflakes = ">=1.1.0,<3" +pyflakes = ">=3.0.0" [[package]] name = "backports-tarfile" version = "1.2.0" description = "Backport of CPython tarfile module" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -49,33 +52,34 @@ testing = ["jaraco.test", "pytest (!=8.0.*)", "pytest (>=6,!=8.1.*)", "pytest-ch [[package]] name = "black" -version = "23.12.1" +version = "24.4.2" description = "The uncompromising code formatter."
+category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "black-23.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0aaf6041986767a5e0ce663c7a2f0e9eaf21e6ff87a5f95cbf3675bfd4c41d2"}, - {file = "black-23.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c88b3711d12905b74206227109272673edce0cb29f27e1385f33b0163c414bba"}, - {file = "black-23.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a920b569dc6b3472513ba6ddea21f440d4b4c699494d2e972a1753cdc25df7b0"}, - {file = "black-23.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:3fa4be75ef2a6b96ea8d92b1587dd8cb3a35c7e3d51f0738ced0781c3aa3a5a3"}, - {file = "black-23.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8d4df77958a622f9b5a4c96edb4b8c0034f8434032ab11077ec6c56ae9f384ba"}, - {file = "black-23.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:602cfb1196dc692424c70b6507593a2b29aac0547c1be9a1d1365f0d964c353b"}, - {file = "black-23.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c4352800f14be5b4864016882cdba10755bd50805c95f728011bcb47a4afd59"}, - {file = "black-23.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:0808494f2b2df923ffc5723ed3c7b096bd76341f6213989759287611e9837d50"}, - {file = "black-23.12.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:25e57fd232a6d6ff3f4478a6fd0580838e47c93c83eaf1ccc92d4faf27112c4e"}, - {file = "black-23.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9e13db441c509a3763a7a3d9a49ccc1b4e974a47be4e08ade2a228876500ec"}, - {file = "black-23.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d1bd9c210f8b109b1762ec9fd36592fdd528485aadb3f5849b2740ef17e674e"}, - {file = "black-23.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:ae76c22bde5cbb6bfd211ec343ded2163bba7883c7bc77f6b756a1049436fbb9"}, - {file = "black-23.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1fa88a0f74e50e4487477bc0bb900c6781dbddfdfa32691e780bf854c3b4a47f"}, - {file = "black-23.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4d6a9668e45ad99d2f8ec70d5c8c04ef4f32f648ef39048d010b0689832ec6d"}, - {file = "black-23.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18fb2ae6c4bb63eebe5be6bd869ba2f14fd0259bda7d18a46b764d8fb86298a"}, - {file = "black-23.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:c04b6d9d20e9c13f43eee8ea87d44156b8505ca8a3c878773f68b4e4812a421e"}, - {file = "black-23.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3e1b38b3135fd4c025c28c55ddfc236b05af657828a8a6abe5deec419a0b7055"}, - {file = "black-23.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4f0031eaa7b921db76decd73636ef3a12c942ed367d8c3841a0739412b260a54"}, - {file = "black-23.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97e56155c6b737854e60a9ab1c598ff2533d57e7506d97af5481141671abf3ea"}, - {file = "black-23.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:dd15245c8b68fe2b6bd0f32c1556509d11bb33aec9b5d0866dd8e2ed3dba09c2"}, - {file = "black-23.12.1-py3-none-any.whl", hash = "sha256:78baad24af0f033958cad29731e27363183e140962595def56423e626f4bee3e"}, - {file = "black-23.12.1.tar.gz", hash = "sha256:4ce3ef14ebe8d9509188014d96af1c456a910d5b5cbf434a09fef7e024b3d0d5"}, + {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, + {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, + {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, + {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, + {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, + {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, + {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, + {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, + {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, + {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, + {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, + {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, + {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, + {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, + {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, + {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, + {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, + {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, + {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, + {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, + {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, + {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, ] [package.dependencies] @@ -95,6 +99,7 @@ uvloop = ["uvloop (>=0.15.2)"] name = "brotli" version = "1.1.0" description = "Python bindings for the Brotli compression library" +category = "main" optional = false python-versions = "*" files = [ @@ -187,6 +192,7 @@ files = [ name = "brotlicffi" version = "1.1.0.0" description = "Python CFFI bindings to the Brotli library" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -226,6 +232,7 @@ cffi = ">=1.0.0" name = "certifi" version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." 
+category = "main" optional = false python-versions = ">=3.6" files = [ @@ -237,6 +244,7 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -301,6 +309,7 @@ pycparser = "*" name = "charset-normalizer" version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -400,6 +409,7 @@ files = [ name = "chat-downloader" version = "0.2.8" description = "A simple tool used to retrieve chat messages from livestreams, videos, clips and past broadcasts. No authentication needed!" +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -421,6 +431,7 @@ dev = ["coverage", "flake8", "pytest", "sphinx", "sphinx-rtd-theme", "sphinxcont name = "click" version = "8.1.7" description = "Composable command line interface toolkit" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -435,6 +446,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -446,6 +458,7 @@ files = [ name = "colorlog" version = "6.8.2" description = "Add colours to the output of Python's logging module." +category = "main" optional = false python-versions = ">=3.6" files = [ @@ -463,6 +476,7 @@ development = ["black", "flake8", "mypy", "pytest", "types-colorama"] name = "cryptography" version = "42.0.8" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -517,6 +531,7 @@ test-randomorder = ["pytest-randomly"] name = "decorator" version = "5.1.1" description = "Decorators for Humans" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -528,6 +543,7 @@ files = [ name = "docstring-parser" version = "0.16" description = "Parse Python docstrings in reST, Google and Numpydoc format" +category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -539,6 +555,7 @@ files = [ name = "docutils" version = "0.21.2" description = "Docutils -- Python Documentation Utilities" +category = "dev" optional = false python-versions = ">=3.9" files = [ @@ -550,6 +567,7 @@ files = [ name = "executing" version = "2.0.1" description = "Get the currently executing AST node of a frame, and other information" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -562,24 +580,26 @@ tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipyth [[package]] name = "flake8" -version = "5.0.4" +version = "7.1.0" description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.8.1" files = [ - {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, - {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, + {file = "flake8-7.1.0-py2.py3-none-any.whl", hash = "sha256:2e416edcc62471a64cea09353f4e7bdba32aeb079b6e360554c659a122b1bc6a"}, + {file = "flake8-7.1.0.tar.gz", hash = "sha256:48a07b626b55236e0fb4784ee69a465fbf59d79eec1f5b4785c3d3bc57d17aa5"}, ] [package.dependencies] mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.9.0,<2.10.0" -pyflakes = ">=2.5.0,<2.6.0" +pycodestyle = ">=2.12.0,<2.13.0" +pyflakes = ">=3.2.0,<3.3.0" [[package]] name = "idna" version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -591,6 +611,7 @@ files = [ name = "importlib-metadata" version = "8.0.0" description = "Read metadata from Python packages" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -610,6 +631,7 @@ test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "p name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -621,6 +643,7 @@ files = [ name = "ipython" version = "8.26.0" description = "IPython: Productive Interactive Computing" +category = "dev" optional = false python-versions = ">=3.10" files = [ @@ -658,6 +681,7 @@ test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "num name = "isodate" version = "0.6.1" description = "An ISO 8601 date/time/duration parser and formatter" +category = "main" optional = false python-versions = "*" files = [ @@ -672,6 +696,7 @@ six = "*" name = "isort" version = "5.13.2" description = "A Python utility / library to sort Python imports." 
+category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -686,6 +711,7 @@ colors = ["colorama (>=0.4.6)"] name = "jaraco-classes" version = "3.4.0" description = "Utility functions for Python class constructs" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -704,6 +730,7 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-ena name = "jaraco-context" version = "5.3.0" description = "Useful decorators and context managers" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -722,6 +749,7 @@ testing = ["portend", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytes name = "jaraco-functools" version = "4.0.1" description = "Functools like those found in stdlib" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -740,6 +768,7 @@ testing = ["jaraco.classes", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest name = "jedi" version = "0.19.1" description = "An autocompletion tool for Python that can be used for text editors." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -759,6 +788,7 @@ testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] name = "jeepney" version = "0.8.0" description = "Low-level, pure Python DBus protocol wrapper." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -774,6 +804,7 @@ trio = ["async_generator", "trio"] name = "keyring" version = "25.2.1" description = "Store and access your passwords safely." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -797,13 +828,14 @@ testing = ["pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "py [[package]] name = "loguru" -version = "0.6.0" +version = "0.7.2" description = "Python logging made (stupidly) simple" +category = "main" optional = false python-versions = ">=3.5" files = [ - {file = "loguru-0.6.0-py3-none-any.whl", hash = "sha256:4e2414d534a2ab57573365b3e6d0234dfb1d84b68b7f3b948e6fb743860a77c3"}, - {file = "loguru-0.6.0.tar.gz", hash = "sha256:066bd06758d0a513e9836fd9c6b5a75bfb3fd36841f4b996bc60b547a309d41c"}, + {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"}, + {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"}, ] [package.dependencies] @@ -811,12 +843,13 @@ colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} [package.extras] -dev = ["Sphinx (>=4.1.1)", "black (>=19.10b0)", "colorama (>=0.3.4)", "docutils (==0.16)", "flake8 (>=3.7.7)", "isort (>=5.1.1)", "pytest (>=4.6.2)", "pytest-cov (>=2.7.1)", "sphinx-autobuild (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "tox (>=3.9.0)"] +dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"] [[package]] name = "markdown-it-py" version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -841,6 +874,7 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "matplotlib-inline" version = "0.1.7" description = "Inline Matplotlib backend for Jupyter" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -855,6 +889,7 @@ traitlets = "*" name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -866,6 +901,7 @@ files = [ name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -877,6 +913,7 @@ files = [ name = "more-itertools" version = "10.3.0" description = "More routines for operating on iterables, beyond itertools" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -888,6 +925,7 @@ files = [ name = "mutagen" version = "1.47.0" description = "read and write audio tags for many formats" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -899,6 +937,7 @@ files = [ name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -910,6 +949,7 @@ files = [ name = "nh3" version = "0.2.17" description = "Python bindings to the ammonia HTML sanitization library." +category = "dev" optional = false python-versions = "*" files = [ @@ -935,6 +975,7 @@ files = [ name = "packaging" version = "24.1" description = "Core utilities for Python packages" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -946,6 +987,7 @@ files = [ name = "parso" version = "0.8.4" description = "A Python Parser" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -961,6 +1003,7 @@ testing = ["docopt", "pytest"] name = "pathspec" version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -972,6 +1015,7 @@ files = [ name = "pexpect" version = "4.9.0" description = "Pexpect allows easy control of interactive console applications." +category = "dev" optional = false python-versions = "*" files = [ @@ -984,13 +1028,14 @@ ptyprocess = ">=0.5" [[package]] name = "pkginfo" -version = "1.11.1" +version = "1.10.0" description = "Query metadata from sdists / bdists / installed packages." +category = "dev" optional = false -python-versions = ">=3.8" +python-versions = ">=3.6" files = [ - {file = "pkginfo-1.11.1-py3-none-any.whl", hash = "sha256:bfa76a714fdfc18a045fcd684dbfc3816b603d9d075febef17cb6582bea29573"}, - {file = "pkginfo-1.11.1.tar.gz", hash = "sha256:2e0dca1cf4c8e39644eed32408ea9966ee15e0d324c62ba899a393b3c6b467aa"}, + {file = "pkginfo-1.10.0-py3-none-any.whl", hash = "sha256:889a6da2ed7ffc58ab5b900d888ddce90bce912f2d2de1dc1c26f4cb9fe65097"}, + {file = "pkginfo-1.10.0.tar.gz", hash = "sha256:5df73835398d10db79f8eecd5cd86b1f6d29317589ea70796994d49399af6297"}, ] [package.extras] @@ -1000,6 +1045,7 @@ testing = ["pytest", "pytest-cov", "wheel"] name = "platformdirs" version = "4.2.2" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1016,6 +1062,7 @@ type = ["mypy (>=1.8)"] name = "pluggy" version = "1.5.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1031,6 +1078,7 @@ testing = ["pytest", "pytest-benchmark"] name = "prompt-toolkit" version = "3.0.47" description = "Library for building powerful interactive command lines in Python" +category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -1045,6 +1093,7 @@ wcwidth = "*" name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" +category = "dev" optional = false python-versions = "*" files = [ @@ -1056,6 +1105,7 @@ files = [ name = "pure-eval" version = "0.2.2" description = "Safely evaluate AST nodes without side effects" +category = "dev" optional = false python-versions = "*" files = [ @@ -1068,19 +1118,21 @@ tests = ["pytest"] [[package]] name = "pycodestyle" -version = "2.9.1" +version = "2.12.0" description = "Python style guide checker" +category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, - {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, + {file = "pycodestyle-2.12.0-py2.py3-none-any.whl", hash = "sha256:949a39f6b86c3e1515ba1787c2022131d165a8ad271b11370a8819aa070269e4"}, + {file = "pycodestyle-2.12.0.tar.gz", hash = "sha256:442f950141b4f43df752dd303511ffded3a04c2b6fb7f65980574f0c31e6e79c"}, ] [[package]] name = "pycparser" version = "2.22" description = "C parser in Python" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1092,6 +1144,7 @@ files = [ name = "pycryptodomex" version = "3.20.0" description = "Cryptographic library for Python" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -1131,19 +1184,21 @@ files = [ [[package]] name = "pyflakes" -version = "2.5.0" +version = "3.2.0" description = "passive checker of Python programs" +category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, - {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, + {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, + {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, ] [[package]] name = "pygments" version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1156,41 +1211,45 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pytest" -version = "7.4.4" +version = "8.2.2" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, - {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, + {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"}, + {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"}, ] [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<2.0" +pluggy = ">=1.5,<2.0" [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-dependency" -version = "0.5.1" +version = "0.6.0" description = "Manage dependencies of tests" +category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.4" files = [ - {file = "pytest-dependency-0.5.1.tar.gz", hash = "sha256:c2a892906192663f85030a6ab91304e508e546cddfe557d692d61ec57a1d946b"}, + {file = "pytest-dependency-0.6.0.tar.gz", hash = "sha256:934b0e6a39d95995062c193f7eaeed8a8ffa06ff1bcef4b62b0dc74a708bacc1"}, ] [package.dependencies] -pytest = ">=3.6.0" +pytest = ">=3.7.0" +setuptools = "*" [[package]] name = "pywin32-ctypes" version = "0.2.2" description = "A (partial) reimplementation of pywin32 using ctypes/cffi" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1202,6 +1261,7 @@ files = [ name = "readme-renderer" version = "43.0" description = "readme_renderer is a library for rendering readme descriptions for Warehouse" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1221,6 +1281,7 @@ md = ["cmarkgfm (>=0.8.0)"] name = "requests" version = "2.32.3" description = "Python HTTP for Humans." 
+category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1242,6 +1303,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-toolbelt" version = "1.0.0" description = "A utility belt for advanced users of python-requests" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1256,6 +1318,7 @@ requests = ">=2.0.1,<3.0.0" name = "rfc3986" version = "2.0.0" description = "Validating URI References per RFC 3986" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1270,6 +1333,7 @@ idna2008 = ["idna"] name = "rich" version = "13.7.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -1288,6 +1352,7 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] name = "secretstorage" version = "3.3.3" description = "Python bindings to FreeDesktop.org Secret Service API" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1299,10 +1364,27 @@ files = [ cryptography = ">=2.0" jeepney = ">=0.6" +[[package]] +name = "setuptools" +version = "70.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-70.2.0-py3-none-any.whl", hash = "sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05"}, + {file = "setuptools-70.2.0.tar.gz", hash = "sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1"}, +] + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1314,6 +1396,7 @@ files = [ name = "stack-data" version = "0.6.3" description = "Extract data from python stack frames and tracebacks for informative displays" +category = "dev" optional = false python-versions = "*" files = [ @@ -1333,6 +1416,7 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] name = "tqdm" version = "4.66.4" description = "Fast, Extensible Progress Meter" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1353,6 +1437,7 @@ telegram = ["requests"] name = "traitlets" version = "5.14.3" description = "Traitlets Python configuration system" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1366,19 +1451,20 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0, [[package]] name = "twine" 
-version = "4.0.2" +version = "5.1.1" description = "Collection of utilities for publishing packages on PyPI" +category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "twine-4.0.2-py3-none-any.whl", hash = "sha256:929bc3c280033347a00f847236564d1c52a3e61b1ac2516c97c48f3ceab756d8"}, - {file = "twine-4.0.2.tar.gz", hash = "sha256:9e102ef5fdd5a20661eb88fad46338806c3bd32cf1db729603fe3697b1bc83c8"}, + {file = "twine-5.1.1-py3-none-any.whl", hash = "sha256:215dbe7b4b94c2c50a7315c0275d2258399280fbb7d04182c7e55e24b5f93997"}, + {file = "twine-5.1.1.tar.gz", hash = "sha256:9aa0825139c02b3434d913545c7b847a21c835e11597f5255842d457da2322db"}, ] [package.dependencies] importlib-metadata = ">=3.6" keyring = ">=15.1" -pkginfo = ">=1.8.1" +pkginfo = ">=1.8.1,<1.11" readme-renderer = ">=35.0" requests = ">=2.20" requests-toolbelt = ">=0.8.0,<0.9.0 || >0.9.0" @@ -1390,6 +1476,7 @@ urllib3 = ">=1.26.0" name = "typing-extensions" version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1401,6 +1488,7 @@ files = [ name = "urllib3" version = "2.2.2" description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1418,6 +1506,7 @@ zstd = ["zstandard (>=0.18.0)"] name = "wcwidth" version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" +category = "dev" optional = false python-versions = "*" files = [ @@ -1429,6 +1518,7 @@ files = [ name = "websocket-client" version = "1.8.0" description = "WebSocket client for Python with low level API options" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1445,6 +1535,7 @@ test = ["websockets"] name = "websockets" version = "12.0" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1524,22 +1615,24 @@ files = [ [[package]] name = "wheel" -version = "0.37.1" +version = "0.43.0" description = "A built-package format for Python" +category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.8" files = [ - {file = "wheel-0.37.1-py2.py3-none-any.whl", hash = "sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a"}, - {file = "wheel-0.37.1.tar.gz", hash = "sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4"}, + {file = "wheel-0.43.0-py3-none-any.whl", hash = "sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81"}, + {file = "wheel-0.43.0.tar.gz", hash = "sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85"}, ] [package.extras] -test = ["pytest (>=3.0.0)", "pytest-cov"] +test = ["pytest (>=6.0.0)", "setuptools (>=65)"] [[package]] name = "win32-setctime" version = "1.1.0" description = "A small Python utility to set file creation time on Windows" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1552,13 +1645,14 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [[package]] name = "yt-dlp" -version = "2023.12.30" -description = "A youtube-dl fork with additional features and patches" +version = "2024.7.2" +description = "A feature-rich command-line audio/video downloader" +category = "main" optional = false python-versions = ">=3.8" files = [ - {file = "yt-dlp-2023.12.30.tar.gz", hash = 
"sha256:a11862e57721b0a0f0883dfeb5a4d79ba213a2d4c45e1880e9fd70f8e6570c38"}, - {file = "yt_dlp-2023.12.30-py2.py3-none-any.whl", hash = "sha256:c00d9a71d64472ad441bcaa1ec0c3797d6e60c9f934f270096a96fe51657e7b3"}, + {file = "yt_dlp-2024.7.2-py3-none-any.whl", hash = "sha256:4f76b48244c783e6ac06e8d7627bcf62cbeb4f6d79ba7e3cfc8249e680d4e691"}, + {file = "yt_dlp-2024.7.2.tar.gz", hash = "sha256:2b0c86b579d4a044eaf3c4b00e3d7b24d82e6e26869fa11c288ea4395b387f41"}, ] [package.dependencies] @@ -1567,14 +1661,25 @@ brotlicffi = {version = "*", markers = "implementation_name != \"cpython\""} certifi = "*" mutagen = "*" pycryptodomex = "*" -requests = ">=2.31.0,<3" +requests = ">=2.32.2,<3" urllib3 = ">=1.26.17,<3" websockets = ">=12.0" +[package.extras] +build = ["build", "hatchling", "pip", "setuptools", "wheel"] +curl-cffi = ["curl-cffi (==0.5.10)"] +dev = ["autopep8 (>=2.0,<3.0)", "pre-commit", "pytest (>=8.1,<9.0)", "ruff (>=0.5.0,<0.6.0)"] +py2exe = ["py2exe (>=0.12)"] +pyinstaller = ["pyinstaller (>=6.7.0)"] +secretstorage = ["cffi", "secretstorage"] +static-analysis = ["autopep8 (>=2.0,<3.0)", "ruff (>=0.5.0,<0.6.0)"] +test = ["pytest (>=8.1,<9.0)"] + [[package]] name = "zipp" version = "3.19.2" description = "Backport of pathlib-compatible object wrapper for zip files" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1589,4 +1694,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "eacd967a1ad7943e647ca8ed9ad1a0f9cb4d2d22c7ab51529b166fd9bc568fe5" +content-hash = "a683728686b53f0f52c192f5acaa346bd904671d57c0423060db71371fc6f347" diff --git a/pyproject.toml b/pyproject.toml index dd33ca4..54626ba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,23 +7,23 @@ readme = "README.md" [tool.poetry.dependencies] python = "^3.11" -loguru = "^0.6.0" -tqdm = "^4.64.0" -yt-dlp = "^2023.4.1" -chat-downloader = "0.2.8" +loguru = "^0.7.2" +tqdm = "^4.66.4" +yt-dlp = "^2024.7.2" +chat-downloader = "^0.2.8" isodate = "^0.6.1" -requests = "^2.28.1" +requests = "^2.32.3" -[tool.poetry.dev-dependencies] -autoflake = "^1.4" -black = "^23.3.0" -flake8 = "^5.0.4" -ipython = "^8.4.0" -isort = "^5.10.1" -pytest = "^7.1.2" -pytest-dependency = "^0.5.1" -twine = "^4.0.1" -wheel = "^0.37.1" +[tool.poetry.group.dev.dependencies] +pytest = "^8.2.2" +autoflake = "^2.3.1" +black = "^24.4.2" +flake8 = "^7.1.0" +ipython = "^8.26.0" +isort = "^5.13.2" +pytest-dependency = "^0.6.0" +twine = "^5.1.1" +wheel = "^0.43.0" [build-system] requires = ["poetry-core"] From e5e232fa53d31da933ffe340a32a45517c4d592e Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 23:39:34 -0300 Subject: [PATCH 094/115] add dependencie --- poetry.lock | 100 ++++++++++--------------------------------------- pyproject.toml | 1 + 2 files changed, 20 insertions(+), 81 deletions(-) diff --git a/poetry.lock b/poetry.lock index b00b8c9..e699162 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
[[package]] name = "asttokens" version = "2.4.1" description = "Annotate AST trees with source code positions" -category = "dev" optional = false python-versions = "*" files = [ @@ -23,7 +22,6 @@ test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"] name = "autoflake" version = "2.3.1" description = "Removes unused imports and unused variables" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -38,7 +36,6 @@ pyflakes = ">=3.0.0" name = "backports-tarfile" version = "1.2.0" description = "Backport of CPython tarfile module" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -54,7 +51,6 @@ testing = ["jaraco.test", "pytest (!=8.0.*)", "pytest (>=6,!=8.1.*)", "pytest-ch name = "black" version = "24.4.2" description = "The uncompromising code formatter." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -99,7 +95,6 @@ uvloop = ["uvloop (>=0.15.2)"] name = "brotli" version = "1.1.0" description = "Python bindings for the Brotli compression library" -category = "main" optional = false python-versions = "*" files = [ @@ -192,7 +187,6 @@ files = [ name = "brotlicffi" version = "1.1.0.0" description = "Python CFFI bindings to the Brotli library" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -232,7 +226,6 @@ cffi = ">=1.0.0" name = "certifi" version = "2024.7.4" description = "Python package for providing Mozilla's CA Bundle." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -244,7 +237,6 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -309,7 +301,6 @@ pycparser = "*" name = "charset-normalizer" version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "main" optional = false python-versions = ">=3.7.0" files = [ @@ -409,7 +400,6 @@ files = [ name = "chat-downloader" version = "0.2.8" description = "A simple tool used to retrieve chat messages from livestreams, videos, clips and past broadcasts. No authentication needed!" -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -431,7 +421,6 @@ dev = ["coverage", "flake8", "pytest", "sphinx", "sphinx-rtd-theme", "sphinxcont name = "click" version = "8.1.7" description = "Composable command line interface toolkit" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -446,7 +435,6 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -458,7 +446,6 @@ files = [ name = "colorlog" version = "6.8.2" description = "Add colours to the output of Python's logging module." -category = "main" optional = false python-versions = ">=3.6" files = [ @@ -476,7 +463,6 @@ development = ["black", "flake8", "mypy", "pytest", "types-colorama"] name = "cryptography" version = "42.0.8" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -531,7 +517,6 @@ test-randomorder = ["pytest-randomly"] name = "decorator" version = "5.1.1" description = "Decorators for Humans" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -543,7 +528,6 @@ files = [ name = "docstring-parser" version = "0.16" description = "Parse Python docstrings in reST, Google and Numpydoc format" -category = "main" optional = false python-versions = ">=3.6,<4.0" files = [ @@ -555,7 +539,6 @@ files = [ name = "docutils" version = "0.21.2" description = "Docutils -- Python Documentation Utilities" -category = "dev" optional = false python-versions = ">=3.9" files = [ @@ -567,7 +550,6 @@ files = [ name = "executing" version = "2.0.1" description = "Get the currently executing AST node of a frame, and other information" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -582,7 +564,6 @@ tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipyth name = "flake8" version = "7.1.0" description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" optional = false python-versions = ">=3.8.1" files = [ @@ -599,7 +580,6 @@ pyflakes = ">=3.2.0,<3.3.0" name = "idna" version = "3.7" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -611,7 +591,6 @@ files = [ name = "importlib-metadata" version = "8.0.0" description = "Read metadata from Python packages" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -631,7 +610,6 @@ test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "p name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -643,7 +621,6 @@ files = [ name = "ipython" version = "8.26.0" description = "IPython: Productive Interactive Computing" -category = "dev" optional = false python-versions = ">=3.10" files = [ @@ -681,7 +658,6 @@ test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "num name = "isodate" version = "0.6.1" description = "An ISO 8601 date/time/duration parser and formatter" -category = "main" optional = false python-versions = "*" files = [ @@ -696,7 +672,6 @@ six = "*" name = "isort" version = "5.13.2" description = "A Python utility / library to sort Python imports." -category = "dev" optional = false python-versions = ">=3.8.0" files = [ @@ -711,7 +686,6 @@ colors = ["colorama (>=0.4.6)"] name = "jaraco-classes" version = "3.4.0" description = "Utility functions for Python class constructs" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -730,7 +704,6 @@ testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-ena name = "jaraco-context" version = "5.3.0" description = "Useful decorators and context managers" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -749,7 +722,6 @@ testing = ["portend", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytes name = "jaraco-functools" version = "4.0.1" description = "Functools like those found in stdlib" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -768,7 +740,6 @@ testing = ["jaraco.classes", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest name = "jedi" version = "0.19.1" description = "An autocompletion tool for Python that can be used for text editors." 
-category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -788,7 +759,6 @@ testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] name = "jeepney" version = "0.8.0" description = "Low-level, pure Python DBus protocol wrapper." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -804,7 +774,6 @@ trio = ["async_generator", "trio"] name = "keyring" version = "25.2.1" description = "Store and access your passwords safely." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -830,7 +799,6 @@ testing = ["pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "py name = "loguru" version = "0.7.2" description = "Python logging made (stupidly) simple" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -849,7 +817,6 @@ dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptio name = "markdown-it-py" version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -874,7 +841,6 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] name = "matplotlib-inline" version = "0.1.7" description = "Inline Matplotlib backend for Jupyter" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -889,7 +855,6 @@ traitlets = "*" name = "mccabe" version = "0.7.0" description = "McCabe checker, plugin for flake8" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -901,7 +866,6 @@ files = [ name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -913,7 +877,6 @@ files = [ name = "more-itertools" version = "10.3.0" description = "More routines for operating on iterables, beyond itertools" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -925,7 +888,6 @@ files = [ name = "mutagen" version = "1.47.0" description = "read and write audio tags for many formats" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -937,7 +899,6 @@ files = [ name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -949,7 +910,6 @@ files = [ name = "nh3" version = "0.2.17" description = "Python bindings to the ammonia HTML sanitization library." -category = "dev" optional = false python-versions = "*" files = [ @@ -975,7 +935,6 @@ files = [ name = "packaging" version = "24.1" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -987,7 +946,6 @@ files = [ name = "parso" version = "0.8.4" description = "A Python Parser" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1003,7 +961,6 @@ testing = ["docopt", "pytest"] name = "pathspec" version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1015,7 +972,6 @@ files = [ name = "pexpect" version = "4.9.0" description = "Pexpect allows easy control of interactive console applications." -category = "dev" optional = false python-versions = "*" files = [ @@ -1030,7 +986,6 @@ ptyprocess = ">=0.5" name = "pkginfo" version = "1.10.0" description = "Query metadata from sdists / bdists / installed packages." 
-category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1045,7 +1000,6 @@ testing = ["pytest", "pytest-cov", "wheel"] name = "platformdirs" version = "4.2.2" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1062,7 +1016,6 @@ type = ["mypy (>=1.8)"] name = "pluggy" version = "1.5.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1078,7 +1031,6 @@ testing = ["pytest", "pytest-benchmark"] name = "prompt-toolkit" version = "3.0.47" description = "Library for building powerful interactive command lines in Python" -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -1093,7 +1045,6 @@ wcwidth = "*" name = "ptyprocess" version = "0.7.0" description = "Run a subprocess in a pseudo terminal" -category = "dev" optional = false python-versions = "*" files = [ @@ -1105,7 +1056,6 @@ files = [ name = "pure-eval" version = "0.2.2" description = "Safely evaluate AST nodes without side effects" -category = "dev" optional = false python-versions = "*" files = [ @@ -1120,7 +1070,6 @@ tests = ["pytest"] name = "pycodestyle" version = "2.12.0" description = "Python style guide checker" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1132,7 +1081,6 @@ files = [ name = "pycparser" version = "2.22" description = "C parser in Python" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1144,7 +1092,6 @@ files = [ name = "pycryptodomex" version = "3.20.0" description = "Cryptographic library for Python" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -1186,7 +1133,6 @@ files = [ name = "pyflakes" version = "3.2.0" description = "passive checker of Python programs" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1198,7 +1144,6 @@ files = [ name = "pygments" version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1213,7 +1158,6 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pytest" version = "8.2.2" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1234,7 +1178,6 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments name = "pytest-dependency" version = "0.6.0" description = "Manage dependencies of tests" -category = "dev" optional = false python-versions = ">=3.4" files = [ @@ -1245,11 +1188,27 @@ files = [ pytest = ">=3.7.0" setuptools = "*" +[[package]] +name = "pytest-mock" +version = "3.14.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + [[package]] name = "pywin32-ctypes" version = "0.2.2" description = "A (partial) reimplementation of pywin32 using ctypes/cffi" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1261,7 +1220,6 @@ files = [ name = "readme-renderer" version = "43.0" description = "readme_renderer is a library for rendering readme descriptions for Warehouse" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1281,7 +1239,6 @@ md = ["cmarkgfm (>=0.8.0)"] name = "requests" version = "2.32.3" description = "Python HTTP for Humans." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1303,7 +1260,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests-toolbelt" version = "1.0.0" description = "A utility belt for advanced users of python-requests" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1318,7 +1274,6 @@ requests = ">=2.0.1,<3.0.0" name = "rfc3986" version = "2.0.0" description = "Validating URI References per RFC 3986" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1333,7 +1288,6 @@ idna2008 = ["idna"] name = "rich" version = "13.7.1" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -1352,7 +1306,6 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] name = "secretstorage" version = "3.3.3" description = "Python bindings to FreeDesktop.org Secret Service API" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1368,7 +1321,6 @@ jeepney = ">=0.6" name = "setuptools" version = "70.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1384,7 +1336,6 @@ test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1396,7 +1347,6 @@ files = [ name = "stack-data" version = "0.6.3" description = "Extract data from python stack frames and tracebacks for informative displays" -category = "dev" optional 
= false python-versions = "*" files = [ @@ -1416,7 +1366,6 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] name = "tqdm" version = "4.66.4" description = "Fast, Extensible Progress Meter" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1437,7 +1386,6 @@ telegram = ["requests"] name = "traitlets" version = "5.14.3" description = "Traitlets Python configuration system" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1453,7 +1401,6 @@ test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0, name = "twine" version = "5.1.1" description = "Collection of utilities for publishing packages on PyPI" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1476,7 +1423,6 @@ urllib3 = ">=1.26.0" name = "typing-extensions" version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1488,7 +1434,6 @@ files = [ name = "urllib3" version = "2.2.2" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1506,7 +1451,6 @@ zstd = ["zstandard (>=0.18.0)"] name = "wcwidth" version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" -category = "dev" optional = false python-versions = "*" files = [ @@ -1518,7 +1462,6 @@ files = [ name = "websocket-client" version = "1.8.0" description = "WebSocket client for Python with low level API options" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1535,7 +1478,6 @@ test = ["websockets"] name = "websockets" version = "12.0" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1617,7 +1559,6 @@ files = [ name = "wheel" version = "0.43.0" description = "A built-package format for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1632,7 +1573,6 @@ test = ["pytest (>=6.0.0)", "setuptools (>=65)"] name = "win32-setctime" version = "1.1.0" description = "A small Python utility to set file creation time on Windows" -category = "main" optional = false python-versions = ">=3.5" files = [ @@ -1647,7 +1587,6 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] name = "yt-dlp" version = "2024.7.2" description = "A feature-rich command-line audio/video downloader" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1679,7 +1618,6 @@ test = ["pytest (>=8.1,<9.0)"] name = "zipp" version = "3.19.2" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1694,4 +1632,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "a683728686b53f0f52c192f5acaa346bd904671d57c0423060db71371fc6f347" +content-hash = "76e17cb16ab4f189fd1341e4cce365f4eaee9267c1dc1160cde6107b31d3d536" diff --git a/pyproject.toml b/pyproject.toml index 54626ba..cea6c1f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,6 +25,7 @@ pytest-dependency = "^0.6.0" twine = "^5.1.1" wheel = "^0.43.0" +pytest-mock = "^3.14.0" [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" From 6e46679a7630f3ae0f266f67bc3378d251604836 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?=C3=81lvaro=20Justen=20=28=40turicas=29?= Date: Sun, 19 May 2024 21:28:04 -0300 Subject: [PATCH 095/115] Implement draft CLI module --- youtool/cli.py | 38 ++++++++++++-------------------------- 1 file changed, 12 insertions(+), 26 deletions(-) diff --git a/youtool/cli.py b/youtool/cli.py index 362d1d3..4433ffc 100644 --- a/youtool/cli.py +++ b/youtool/cli.py @@ -1,26 +1,17 @@ import argparse +import os + +from youtool.commands import COMMANDS def main(): -<<<<<<< HEAD - parser = argparse.ArgumentParser() - parser.add_argument("--api-key") - subparsers = parser.add_subparsers(required=True, dest="command") -======= """Main function for the YouTube CLI Tool. ->>>>>>> 7133ae0 (- Add updates) - - api_key = args.api_key or os.environ.get("YOUTUBE_API_KEY") - -<<<<<<< HEAD - cmd_channel_id = subparsers.add_parser("channel-id", help="Get channel IDs from a list of URLs (or CSV filename with URLs inside), generate CSV output (just the IDs)") - cmd_channel_info = subparsers.add_parser("channel-info", help="Get channel info from a list of IDs (or CSV filename with IDs inside), generate CSV output (same schema for `channel` dicts)") - cmd_video_info = subparsers.add_parser("video-info", help="Get video info from a list of IDs or URLs (or CSV filename with URLs/IDs inside), generate CSV output (same schema for `video` dicts)") - cmd_video_search = subparsers.add_parser("video-search", help="Get video info from a list of IDs or URLs (or CSV filename with URLs/IDs inside), generate CSV output (simplified `video` dict schema or option to get full video info after)") - cmd_video_comments = subparsers.add_parser("video-comments", help="Get comments from a video ID, generate CSV output (same schema for `comment` dicts)") - cmd_video_livechat = subparsers.add_parser("video-livechat", help="Get comments from a video ID, generate CSV output (same schema for `chat_message` dicts)") - cmd_video_transcriptions = subparsers.add_parser("video-transcription", help="Download video transcriptions based on language code, path and list of video IDs or URLs (or CSV filename with URLs/IDs inside), download files to destination and report results") -======= + + This function sets up the argument parser for the CLI tool, including options for the YouTube API key and + command-specific subparsers. It then parses the command-line arguments, retrieving the YouTube API key + from either the command-line argument '--api-key' or the environment variable 'YOUTUBE_API_KEY'. If the API + key is not provided through any means, it raises an argparse.ArgumentError. + Finally, the function executes the appropriate command based on the parsed arguments. If an exception occurs during the execution of the command, it is caught and raised as an argparse error for proper handling. 
@@ -30,20 +21,15 @@ def main(): """ parser = argparse.ArgumentParser(description="CLI Tool for managing YouTube videos add playlists") parser.add_argument("--api-key", type=str, help="YouTube API Key", dest="api_key") - parser.add_argument("--debug", type=bool, help="Debug mode", dest="debug") + parser.add_argument("--debug", default=False, action="store_true", help="Debug mode", dest="debug") subparsers = parser.add_subparsers(required=True, dest="command", title="Command", help="Command to be executed") - # cmd_video_search = subparsers.add_parser("video-search", help="Get video info from a list of IDs or URLs (or CSV filename with URLs/IDs inside), generate CSV output (simplified `video` dict schema or option to get full video info after)") - # cmd_video_comments = subparsers.add_parser("video-comments", help="Get comments from a video ID, generate CSV output (same schema for `comment` dicts)") - # cmd_video_livechat = subparsers.add_parser("video-livechat", help="Get comments from a video ID, generate CSV output (same schema for `chat_message` dicts)") - # cmd_video_transcriptions = subparsers.add_parser("video-transcription", help="Download video transcriptions based on language code, path and list of video IDs or URLs (or CSV filename with URLs/IDs inside), download files to destination and report results") - for command in COMMANDS: command.parse_arguments(subparsers) ->>>>>>> 7133ae0 (- Add updates) args = parser.parse_args() + args.api_key = args.api_key or os.environ.get("YOUTUBE_API_KEY") if not args.api_key: parser.error("YouTube API Key is required") @@ -57,4 +43,4 @@ def main(): if __name__ == "__main__": - main() + main() \ No newline at end of file From 2dd9e252d274b799abeba59e99046d34ba848181 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Tue, 2 Jul 2024 00:59:00 -0300 Subject: [PATCH 096/115] Add test for base file --- tests/commands/test_base.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/commands/test_base.py b/tests/commands/test_base.py index 7cf87d3..7863c80 100644 --- a/tests/commands/test_base.py +++ b/tests/commands/test_base.py @@ -2,6 +2,11 @@ import argparse import pytest +<<<<<<< HEAD +======= +from io import StringIO +from datetime import datetime +>>>>>>> fdb1fc6 (Add test for base file) from pathlib import Path from unittest.mock import MagicMock, patch, mock_open from youtool.commands import Command From 8281c7db8d818c21f35db007f1f50358942a9938 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Tue, 2 Jul 2024 12:40:16 -0300 Subject: [PATCH 097/115] Add update --- tests/commands/test_base.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/tests/commands/test_base.py b/tests/commands/test_base.py index 7863c80..7cf87d3 100644 --- a/tests/commands/test_base.py +++ b/tests/commands/test_base.py @@ -2,11 +2,6 @@ import argparse import pytest -<<<<<<< HEAD -======= -from io import StringIO -from datetime import datetime ->>>>>>> fdb1fc6 (Add test for base file) from pathlib import Path from unittest.mock import MagicMock, patch, mock_open from youtool.commands import Command From 3249990a9e398c707ade16d8afedad8abb275ef6 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 4 Jul 2024 13:57:55 -0300 Subject: [PATCH 098/115] - Add test for channel_info command; - Add update channel_info file; - fix test_base --- tests/commands/test_channel_info.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/commands/test_channel_info.py b/tests/commands/test_channel_info.py index 329680e..7f83a39 100644 --- 
a/tests/commands/test_channel_info.py +++ b/tests/commands/test_channel_info.py @@ -31,7 +31,10 @@ def test_filter_fields(): assert filtered_info == expected_result, f"Expected {expected_result}, but got {filtered_info}" -def test_channel_ids_from_urls_and_usernames(mocker, channels_urls, usernames): +def test_channel_ids_from_urls_and_usernames(mocker): + urls = ["https://www.youtube.com/@Turicas/featured", "https://www.youtube.com/c/PythonicCaf%C3%A9"] + usernames = ["Turicas", "PythonicCafe"] + ids_from_urls_mock = "id_from_url" ids_from_usernames_mock = "id_from_username" youtube_mock = mocker.patch("youtool.commands.channel_info.YouTube") From 8e3346584b2de7a673103f0742e292885c028d29 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 4 Jul 2024 21:32:42 -0300 Subject: [PATCH 099/115] - Add test for video_search command; - Add updates to some test files; - created conftest file --- tests/commands/test_channel_info.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/tests/commands/test_channel_info.py b/tests/commands/test_channel_info.py index 7f83a39..329680e 100644 --- a/tests/commands/test_channel_info.py +++ b/tests/commands/test_channel_info.py @@ -31,10 +31,7 @@ def test_filter_fields(): assert filtered_info == expected_result, f"Expected {expected_result}, but got {filtered_info}" -def test_channel_ids_from_urls_and_usernames(mocker): - urls = ["https://www.youtube.com/@Turicas/featured", "https://www.youtube.com/c/PythonicCaf%C3%A9"] - usernames = ["Turicas", "PythonicCafe"] - +def test_channel_ids_from_urls_and_usernames(mocker, channels_urls, usernames): ids_from_urls_mock = "id_from_url" ids_from_usernames_mock = "id_from_username" youtube_mock = mocker.patch("youtool.commands.channel_info.YouTube") From 50ac1b49e37b24cceed89e89721aea919c576c88 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 18:28:41 -0300 Subject: [PATCH 100/115] add github actions --- .github/workflows/ci.yaml | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 .github/workflows/ci.yaml diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 0000000..0bcc0b8 --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,26 @@ +name: CI + +on: [ push, pull_request ] + +jobs: + ci: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Set up Python 3.11 + uses: actions/setup-python@v4.3.0 + with: + python-version: 3.11 + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install poetry==1.1.15 + poetry --version + poetry config experimental.new-installer false + poetry config virtualenvs.create false + poetry install -vv + cp .env-sample .env + - name: Lint + run: make lint + - name: Test + run: make test \ No newline at end of file From 981738230a0d19739914b7f92add54708902a8c5 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 22:01:57 -0300 Subject: [PATCH 101/115] add updates github actions --- .github/workflows/ci.yaml | 16 ++++++---------- 1 file changed, 6 insertions(+), 10 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 0bcc0b8..ee3da63 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -6,21 +6,17 @@ jobs: ci: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 + - name: Install poetry + run: pip install poetry==1.7.1 - name: Set up Python 3.11 - uses: actions/setup-python@v4.3.0 + uses: actions/setup-python@v5 with: python-version: 
3.11 + cache: 'poetry' - name: Install dependencies run: | python -m pip install --upgrade pip - pip install poetry==1.1.15 - poetry --version - poetry config experimental.new-installer false - poetry config virtualenvs.create false poetry install -vv - cp .env-sample .env - - name: Lint - run: make lint - name: Test - run: make test \ No newline at end of file + run: make test From b252ef8bd610285b91bfc564deb0af632f25b026 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 23:33:11 -0300 Subject: [PATCH 102/115] fix --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index ee3da63..bdea834 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -19,4 +19,4 @@ jobs: python -m pip install --upgrade pip poetry install -vv - name: Test - run: make test + run: make test From 1c8a087df92a13b2b069ca6c77265faeea1915f8 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 23:55:54 -0300 Subject: [PATCH 103/115] fix --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index bdea834..fe87f5c 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -19,4 +19,4 @@ jobs: python -m pip install --upgrade pip poetry install -vv - name: Test - run: make test + run: pytest From 32487add03a76a289086fb50b44afff03abde9ba Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 23:59:50 -0300 Subject: [PATCH 104/115] fix --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index fe87f5c..3703fa2 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1,6 +1,6 @@ name: CI -on: [ push, pull_request ] +on: [ push, pull_request, commit ] jobs: ci: @@ -19,4 +19,4 @@ jobs: python -m pip install --upgrade pip poetry install -vv - name: Test - run: pytest + run: poetry run pytest From 6c6575c7947e0717357112e7ab8e1c0c67b2c74f Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Sat, 6 Jul 2024 00:00:56 -0300 Subject: [PATCH 105/115] fix --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 3703fa2..99d2608 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -1,6 +1,6 @@ name: CI -on: [ push, pull_request, commit ] +on: [ push, pull_request ] jobs: ci: From 6af4f31840a154d87558b6a76efdfd9a976d4d8a Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Sat, 6 Jul 2024 00:09:02 -0300 Subject: [PATCH 106/115] fix --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 99d2608..fe87f5c 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -19,4 +19,4 @@ jobs: python -m pip install --upgrade pip poetry install -vv - name: Test - run: poetry run pytest + run: pytest From 8223267b04a716a639306d265557a70ccc039c45 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lvaro=20Justen=20=28=40turicas=29?= Date: Sun, 19 May 2024 21:28:04 -0300 Subject: [PATCH 107/115] Implement draft CLI module --- youtool/cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/youtool/cli.py b/youtool/cli.py index 4433ffc..49dfe12 100644 --- a/youtool/cli.py +++ b/youtool/cli.py @@ -43,4 +43,4 @@ def main(): if __name__ == 
"__main__": - main() \ No newline at end of file + main() From 98ea4d0bfc0306ecd94ba36c3224ca7db07718f4 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 4 Jul 2024 13:57:55 -0300 Subject: [PATCH 108/115] - Add test for channel_info command; - Add update channel_info file; - fix test_base --- tests/commands/test_channel_info.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/commands/test_channel_info.py b/tests/commands/test_channel_info.py index 329680e..7f83a39 100644 --- a/tests/commands/test_channel_info.py +++ b/tests/commands/test_channel_info.py @@ -31,7 +31,10 @@ def test_filter_fields(): assert filtered_info == expected_result, f"Expected {expected_result}, but got {filtered_info}" -def test_channel_ids_from_urls_and_usernames(mocker, channels_urls, usernames): +def test_channel_ids_from_urls_and_usernames(mocker): + urls = ["https://www.youtube.com/@Turicas/featured", "https://www.youtube.com/c/PythonicCaf%C3%A9"] + usernames = ["Turicas", "PythonicCafe"] + ids_from_urls_mock = "id_from_url" ids_from_usernames_mock = "id_from_username" youtube_mock = mocker.patch("youtool.commands.channel_info.YouTube") From 03dbcd47d38495d33fb503a588acf855f49371f8 Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Thu, 4 Jul 2024 21:32:42 -0300 Subject: [PATCH 109/115] - Add test for video_search command; - Add updates to some test files; - created conftest file --- tests/commands/test_channel_info.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/tests/commands/test_channel_info.py b/tests/commands/test_channel_info.py index 7f83a39..329680e 100644 --- a/tests/commands/test_channel_info.py +++ b/tests/commands/test_channel_info.py @@ -31,10 +31,7 @@ def test_filter_fields(): assert filtered_info == expected_result, f"Expected {expected_result}, but got {filtered_info}" -def test_channel_ids_from_urls_and_usernames(mocker): - urls = ["https://www.youtube.com/@Turicas/featured", "https://www.youtube.com/c/PythonicCaf%C3%A9"] - usernames = ["Turicas", "PythonicCafe"] - +def test_channel_ids_from_urls_and_usernames(mocker, channels_urls, usernames): ids_from_urls_mock = "id_from_url" ids_from_usernames_mock = "id_from_username" youtube_mock = mocker.patch("youtool.commands.channel_info.YouTube") From ffbccff2f77b2a29cbc54f631105725ba195d00d Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 18:28:41 -0300 Subject: [PATCH 110/115] add github actions --- pyproject.toml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index cea6c1f..b15c141 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,8 +1,14 @@ [tool.poetry] name = "youtool" +<<<<<<< HEAD version = "0.1.1" description = "Easy-to-use library to access YouTube Data API v3 in bulk operations" authors = ["Álvaro Justen "] +======= +version = "0.1.0" +description = "" +authors = ["Your Name "] +>>>>>>> 4c93b16 (add github actions) readme = "README.md" [tool.poetry.dependencies] From 37d2a75d6630ea48fe6e5c525ba48e835808a92c Mon Sep 17 00:00:00 2001 From: aninhasalesp Date: Fri, 5 Jul 2024 23:52:08 -0300 Subject: [PATCH 111/115] add file readthedocs config --- .readthedocs.yaml | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 .readthedocs.yaml diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 0000000..124910a --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,35 @@ +# Read the Docs configuration file for Sphinx projects +# See 
https://docs.readthedocs.io/en/stable/config-file/v2.html for details
+
+# Required
+version: 2
+
+# Set the OS, Python version and other tools you might need
+build:
+  os: ubuntu-22.04
+  tools:
+    python: "3.12"
+    # You can also specify other tool versions:
+    # nodejs: "20"
+    # rust: "1.70"
+    # golang: "1.20"
+
+# Build documentation in the "docs/" directory with Sphinx
+sphinx:
+  configuration: docs/conf.py
+  # You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
+  # builder: "dirhtml"
+  # Fail on all warnings to avoid broken references
+  # fail_on_warning: true
+
+# Optionally build your docs in additional formats such as PDF and ePub
+# formats:
+#   - pdf
+#   - epub
+
+# Optional but recommended, declare the Python requirements required
+# to build your documentation
+# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
+# python:
+#   install:
+#     - requirements: docs/requirements.txt
\ No newline at end of file

From ca1d94b6ed222a43ba56d8212b8849926e20def8 Mon Sep 17 00:00:00 2001
From: aninhasalesp
Date: Mon, 8 Jul 2024 22:13:33 -0300
Subject: [PATCH 112/115] implementing read the docs config

---
 CONTRIBUTING.rst      | 120 ++++++++++++++++++++
 docs/Makefile         |  20 ++++
 docs/conf.py          |  27 +++++
 docs/contributing.rst |   1 +
 docs/index.rst        | 156 +++++++++++++++++++++++++
 docs/make.bat         |  35 ++++++
 poetry.lock           | 263 +++++++++++++++++++++++++++++++++++++++++-
 pyproject.toml        |   1 +
 8 files changed, 622 insertions(+), 1 deletion(-)
 create mode 100644 CONTRIBUTING.rst
 create mode 100644 docs/Makefile
 create mode 100644 docs/conf.py
 create mode 100644 docs/contributing.rst
 create mode 100644 docs/index.rst
 create mode 100644 docs/make.bat

diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
new file mode 100644
index 0000000..3ffe9be
--- /dev/null
+++ b/CONTRIBUTING.rst
@@ -0,0 +1,120 @@
+How to contribute to Youtool
+============================
+
+Thank you for considering contributing to Youtool!
+
+First time setup in your local environment:
+-------------------------------------------
+
+- Make sure you have a `GitHub account `_
+
+- Fork Youtool to your GitHub account by clicking the `Fork `_ button
+
+- `Clone `_ your fork locally, replacing your-username in the command below with your actual username
+
+.. code-block::
+
+    git clone https://github.com/your-username/youtool
+    cd youtool
+
+Installing Poetry
+-------------------
+To manage dependencies and packaging for the project, we use Poetry.
+- Please follow the installation instructions in the official `Poetry `_ documentation.
+
+
+Setting Up the Virtual Environment
+----------------------------------
+- After installing Poetry, you need to set up the virtual environment for the project. Navigate to the project directory and run the following command:
+
+.. code-block::
+
+    poetry shell
+
+This command will create and activate a virtual environment for the project.
+
+
+Installing Dependencies
+-----------------------
+- Once the virtual environment is activated, you can install the project dependencies by running:
+
+.. code-block::
+
+    poetry install
+
+This command will install all the dependencies listed in the pyproject.toml file.
+
+
+Creating a Local Branch from a Remote Branch
+--------------------------------------------
+To start contributing, you need to create a local branch based on a remote branch.
+Use the following commands to achieve this:
+1. Fetch the latest changes from the remote repository:
+
+.. code-block::
+
+    git fetch origin
+
+2.
Create and switch to a new branch based on the remote branch:
+
+.. code-block::
+
+    git checkout -b <branch-name> origin/<remote-branch>
+
+Push your commits to your fork on GitHub and `create a pull request `_. Link to the issue being addressed with fixes #123 in the pull request description.
+
+.. code-block::
+
+    git push --set-upstream origin <branch-name>
+
+Replace <branch-name> with your desired branch name and <remote-branch> with the name of the remote branch you want to base your work on.
+
+By following these steps, you'll have a local branch set up and ready for your contributions.
+
+
+Running Tests
+-------------
+Before submitting your changes, it's important to run the tests to ensure everything is working correctly.
+Depending on whether you are inside or outside the virtual environment, use one of the following commands:
+
+1. Inside the virtual environment:
+If you have already activated the virtual environment with poetry shell, run:
+
+.. code-block::
+
+    pytest
+
+2. Outside the virtual environment:
+If you are not inside the virtual environment, you can still run the tests using Poetry:
+
+.. code-block::
+
+    poetry run pytest
+
+By following these steps, you'll ensure that all tests are run correctly before submitting your contributions.
+
+Updating Documentation
+----------------------
+Our documentation is hosted on Read the Docs, and the configuration files are located in the docs directory. To update the documentation, follow these steps:
+
+1. Navigate to the docs directory:
+
+.. code-block::
+
+    cd docs
+
+2. Make your changes:
+   Edit the necessary files to update the documentation.
+   The main configuration file is typically conf.py, but you may also need to update other ``.rst`` files as required.
+
+3. Build the documentation locally:
+   After making your changes, you can build the HTML version of the documentation to preview your updates.
+   Run the following command:
+
+.. code-block::
+
+    make html
+
+Open ``_build/html/index.html`` in your browser to view the docs.
+
+Read more about `Sphinx `_.
diff --git a/docs/Makefile b/docs/Makefile
new file mode 100644
index 0000000..d4bb2cb
--- /dev/null
+++ b/docs/Makefile
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS    ?=
+SPHINXBUILD   ?= sphinx-build
+SOURCEDIR     = .
+BUILDDIR      = _build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 0000000..3f73132
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,27 @@
+# Configuration file for the Sphinx documentation builder.
+#
+# For the full list of built-in configuration values, see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+
+# -- Project information -----------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
+
+project = 'Youtool'
+copyright = '2024, Álvaro Justen'
+author = 'Álvaro Justen'
+
+# -- General configuration ---------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
+
+extensions = ["myst_parser"]
+
+templates_path = ['_templates']
+exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+
+
+
+# -- Options for HTML output -------------------------------------------------
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
+
+html_theme = 'alabaster'
+html_static_path = ['_static']
diff --git a/docs/contributing.rst b/docs/contributing.rst
new file mode 100644
index 0000000..3bdd7dc
--- /dev/null
+++ b/docs/contributing.rst
@@ -0,0 +1 @@
+.. include:: ../CONTRIBUTING.rst
\ No newline at end of file
diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644
index 0000000..7094336
--- /dev/null
+++ b/docs/index.rst
@@ -0,0 +1,156 @@
+.. youtool documentation master file, created by
+   sphinx-quickstart on Mon Jul 8 14:31:22 2024.
+   You can adapt this file completely to your liking, but it should at least
+   contain the root `toctree` directive.
+
+Welcome to the youtool documentation!
+=====================================
+
+Easily access YouTube Data API v3 in batches
+--------------------------------------------
+
+.. toctree::
+   :maxdepth: 2
+
+   contributing
+
+--------------------------------------------
+
+Python library and command-line interface to crawl YouTube Data API v3 in batch operations and other related tasks.
+Easier to use than alternatives - you don't need to spend time learning the YouTube API and its caveats.
+With this library you can get:
+
+- Channel ID from channel URL (scraping) or username (API)
+- Channel information (title, subscribers etc.)
+- List of playlists for a channel
+- List of videos for a playlist
+- Video information (title, description, likes, comments etc.)
+- Comments
+- Livechat, including superchat (scraping using chat-downloader)
+- Automatic transcription (scraping using yt-dlp)
+
+The library will automatically:
+
+- Try as many keys as you provide
+- Use batches of 50 items in supported API endpoints
+- Paginate when needed
+
+Installation
+------------
+
+Install the project by running:
+
+.. code-block:: bash
+
+    pip install youtool
+
+Using as a library
+------------------
+Just follow the tutorial/examples below and check ``help()`` on the ``YouTube`` methods.
+
+`GitHub Repository `_
+
+1. Initializing the YouTube API:
+
+.. code-block:: python
+
+    from youtool import YouTube
+
+    api_keys = ["key1", "key2", ...]
+    yt = YouTube(api_keys, disable_ipv6=True)
+
+Here, we are creating an instance of the YouTube class using a list of YouTube API keys.
+The disable_ipv6=True option is passed to disable IPv6 usage.
+
+2. Extracting Channel IDs by URL:
+
+.. code-block:: python
+
+    channel_id_1 = yt.channel_id_from_url("https://youtube.com/c/PythonicCafe/")
+    print(f"Pythonic Café's channel ID (got from URL): {channel_id_1}")
+
+3. Extracting Channel IDs by username:
+
+.. code-block:: python
+
+    channel_id_2 = yt.channel_id_from_username("turicas")
+    print(f"Turicas' channel ID (got from username): {channel_id_2}")
+
+4.
+4. Listing Playlists from a Channel:
+
+.. code-block:: python
+
+    for playlist in yt.channel_playlists(channel_id_2):
+        for video in yt.playlist_videos(playlist["id"]):
+            print(f" Video: {video}")
+
+Here, we iterate through the playlists of a specific channel (channel_id_2) and list the videos in each playlist.
+
+5. Searching for Videos:
+
+.. code-block:: python
+
+    for index, video in enumerate(yt.video_search(term="Álvaro Justen")):
+        print(f" Video: {video}")
+        if index == 4:
+            break
+
+This snippet searches for videos related to a specific term using the video_search method of the yt instance.
+
+6. Fetching Detailed Video Information:
+
+.. code-block:: python
+
+    last_video = list(yt.videos_infos([video["id"]]))[0]
+    pprint(last_video)
+
+Here, we fetch detailed information about a specific video using the videos_infos method of the yt instance.
+
+7. Fetching Channel Information:
+
+.. code-block:: python
+
+    for channel in yt.channels_infos([channel_id_1, channel_id_2]):
+        print(channel)
+
+This snippet fetches detailed information about multiple channels using the channels_infos method of the yt instance.
+
+8. Fetching Video Comments and Live Chat:
+
+.. code-block:: python
+
+    for comment in yt.video_comments(video_id):
+        print(comment)
+    for chat_message in yt.video_livechat(live_video_id):
+        print(chat_message)
+
+Here, we fetch comments and live chat messages from specific videos using the video_comments and video_livechat methods of the yt instance.
+
+9. Downloading Video Transcriptions:
+
+.. code-block:: python
+
+    yt.videos_transcriptions([video_id, live_video_id], language_code="pt", path=download_path)
+
+This snippet downloads transcriptions for specific videos using the videos_transcriptions method of the yt instance.
+
+How to contribute
+------------------
+
+Contributions to the youtool project are welcome.
+
+See :doc:`contributing` for more details.
+
+- `Issue Tracker `_
+- `Source Code `_
+
+Support
+-------
+
+If you are having issues, please let us know through the issue tracker.
+
+License
+-------
+GNU Lesser General Public License (LGPL), version 3.
+
+This project was developed in partnership between Pythonic Café and `Novelo Data `_.
\ No newline at end of file
diff --git a/docs/make.bat b/docs/make.bat
new file mode 100644
index 0000000..32bb245
--- /dev/null
+++ b/docs/make.bat
@@ -0,0 +1,35 @@
+@ECHO OFF
+
+pushd %~dp0
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+	set SPHINXBUILD=sphinx-build
+)
+set SOURCEDIR=.
+set BUILDDIR=_build
+
+%SPHINXBUILD% >NUL 2>NUL
+if errorlevel 9009 (
+	echo.
+	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+	echo.installed, then set the SPHINXBUILD environment variable to point
+	echo.to the full path of the 'sphinx-build' executable. Alternatively you
+	echo.may add the Sphinx directory to PATH.
+	echo.
+	echo.If you don't have Sphinx installed, grab it from
+	echo.https://www.sphinx-doc.org/
+	exit /b 1
+)
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+
+:end
+popd
diff --git a/poetry.lock b/poetry.lock
index e699162..ad20baf 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,5 +1,16 @@
 # This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+[[package]] +name = "alabaster" +version = "0.7.16" +description = "A light, configurable Sphinx theme" +optional = false +python-versions = ">=3.9" +files = [ + {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, + {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, +] + [[package]] name = "asttokens" version = "2.4.1" @@ -32,6 +43,20 @@ files = [ [package.dependencies] pyflakes = ">=3.0.0" +[[package]] +name = "babel" +version = "2.15.0" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.8" +files = [ + {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, + {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, +] + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + [[package]] name = "backports-tarfile" version = "1.2.0" @@ -587,6 +612,17 @@ files = [ {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] + [[package]] name = "importlib-metadata" version = "8.0.0" @@ -770,6 +806,23 @@ files = [ test = ["async-timeout", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"] trio = ["async_generator", "trio"] +[[package]] +name = "jinja2" +version = "3.1.4" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + [[package]] name = "keyring" version = "25.2.1" @@ -837,6 +890,75 @@ profiling = ["gprof2dot"] rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + [[package]] name = "matplotlib-inline" version = "0.1.7" @@ -1343,6 +1465,145 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "sphinx" +version = "7.3.7" +description = "Python documentation generator" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinx-7.3.7-py3-none-any.whl", hash = "sha256:413f75440be4cacf328f580b4274ada4565fb2187d696a84970c23f77b64d8c3"}, + {file = "sphinx-7.3.7.tar.gz", hash = "sha256:a4a7db75ed37531c05002d56ed6948d4c42f473a36f46e1382b0bd76ca9627bc"}, +] + +[package.dependencies] +alabaster = ">=0.7.14,<0.8.0" +babel = ">=2.9" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.18.1,<0.22" +imagesize = ">=1.3" +Jinja2 = ">=3.0" +packaging = ">=21.0" +Pygments = ">=2.14" +requests = ">=2.25.0" +snowballstemmer = ">=2.0" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.9" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["flake8 (>=3.5.0)", "importlib_metadata", "mypy (==1.9.0)", "pytest (>=6.0)", "ruff (==0.3.7)", "sphinx-lint", "tomli", "types-docutils", "types-requests"] +test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=6.0)", "setuptools (>=67.0)"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.8" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_applehelp-1.0.8-py3-none-any.whl", hash = "sha256:cb61eb0ec1b61f349e5cc36b2028e9e7ca765be05e49641c97241274753067b4"}, + {file = "sphinxcontrib_applehelp-1.0.8.tar.gz", hash = "sha256:c40a4f96f3776c4393d933412053962fac2b84f4c99a7982ba42e09576a70619"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.6" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_devhelp-1.0.6-py3-none-any.whl", hash = "sha256:6485d09629944511c893fa11355bda18b742b83a2b181f9a009f7e500595c90f"}, + {file = "sphinxcontrib_devhelp-1.0.6.tar.gz", hash = "sha256:9893fd3f90506bc4b97bdb977ceb8fbd823989f4316b28c3841ec128544372d3"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.5" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_htmlhelp-2.0.5-py3-none-any.whl", hash = "sha256:393f04f112b4d2f53d93448d4bce35842f62b307ccdc549ec1585e950bc35e04"}, + {file = "sphinxcontrib_htmlhelp-2.0.5.tar.gz", hash = "sha256:0dc87637d5de53dd5eec3a6a01753b1ccf99494bd756aafecd74b4fa9e729015"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +optional = false 
+python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.7" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_qthelp-1.0.7-py3-none-any.whl", hash = "sha256:e2ae3b5c492d58fcbd73281fbd27e34b8393ec34a073c792642cd8e529288182"}, + {file = "sphinxcontrib_qthelp-1.0.7.tar.gz", hash = "sha256:053dedc38823a80a7209a80860b16b722e9e0209e32fea98c90e4e6624588ed6"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.10" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" +optional = false +python-versions = ">=3.9" +files = [ + {file = "sphinxcontrib_serializinghtml-1.1.10-py3-none-any.whl", hash = "sha256:326369b8df80a7d2d8d7f99aa5ac577f51ea51556ed974e7716cfd4fca3f6cb7"}, + {file = "sphinxcontrib_serializinghtml-1.1.10.tar.gz", hash = "sha256:93f3f5dc458b91b192fe10c397e324f262cf163d79f3282c158e8436a2c4511f"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +standalone = ["Sphinx (>=5)"] +test = ["pytest"] + [[package]] name = "stack-data" version = "0.6.3" @@ -1632,4 +1893,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "76e17cb16ab4f189fd1341e4cce365f4eaee9267c1dc1160cde6107b31d3d536" +content-hash = "27c89f6f3e6a318198d21d63dbdbb98b7f27b1e32c774b61b6e9fc1cbc322fb5" diff --git a/pyproject.toml b/pyproject.toml index b15c141..dce859b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,6 +32,7 @@ twine = "^5.1.1" wheel = "^0.43.0" pytest-mock = "^3.14.0" +sphinx = "^7.3.7" [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" From 8e22018dc68b91f8323114581ebcc982d28b608f Mon Sep 17 00:00:00 2001 From: Ana Paula Sales Date: Thu, 17 Oct 2024 19:03:08 -0300 Subject: [PATCH 113/115] fix --- pyproject.toml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index dce859b..bc91704 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,14 +1,8 @@ [tool.poetry] name = "youtool" -<<<<<<< HEAD version = "0.1.1" description = "Easy-to-use library to access YouTube Data API v3 in bulk operations" authors = ["Álvaro Justen "] -======= -version = "0.1.0" -description = "" -authors = ["Your Name "] ->>>>>>> 4c93b16 (add github actions) readme = "README.md" [tool.poetry.dependencies] From 32c73c059c781ef0771d2ae1e25eb58fd780d124 Mon Sep 17 00:00:00 2001 From: Ana Paula Sales Date: Thu, 17 Oct 2024 23:38:54 -0300 Subject: [PATCH 114/115] fix --- youtool/cli.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/youtool/cli.py b/youtool/cli.py index 49dfe12..3964dc6 100644 --- a/youtool/cli.py +++ b/youtool/cli.py @@ -1,7 +1,7 @@ import argparse import os -from youtool.commands import COMMANDS +from commands import COMMANDS def main(): From 31b90d0232e5b230463ba40fcaafc8cae617af81 Mon Sep 17 00:00:00 2001 From: 
Ana Paula Sales Date: Thu, 17 Oct 2024 23:39:17 -0300 Subject: [PATCH 115/115] fix --- youtool/commands/video_livechat.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/youtool/commands/video_livechat.py b/youtool/commands/video_livechat.py index 775b857..4469839 100644 --- a/youtool/commands/video_livechat.py +++ b/youtool/commands/video_livechat.py @@ -11,7 +11,7 @@ class VideoLiveChat(Command): arguments = [ {"name": "--id", "type": str, "help": "Video ID", "required": True}, {"name": "--output-file-path", "type": str, "help": "Output CSV file path"}, - {"name": "--expand-emojis", "type": bool, "help": "Expand emojis in chat messages", "default": True} + {"name": "--expand-emojis", "help": "Expand emojis in chat messages", "default": True, "action": "store_true"} ] CHAT_COLUMNS: List[str] = [ @@ -22,7 +22,10 @@ class VideoLiveChat(Command): @staticmethod def parse_timestamp(timestamp: str) -> str: - return datetime.utcfromtimestamp(int(timestamp)).strftime('%Y-%m-%d %H:%M:%S') + try: + return datetime.utcfromtimestamp(int(timestamp)).strftime('%Y-%m-%d %H:%M:%S') + except ValueError: + return datetime.utcfromtimestamp(int(timestamp) / 1000000).strftime('%Y-%m-%d %H:%M:%S') @staticmethod def parse_decimal(value: Optional[str]) -> Optional[float]: @@ -58,7 +61,7 @@ def execute(cls: Self, **kwargs) -> str: text = message["message"] if expand_emojis: for emoji in message.get("emotes", []): - for shortcut in emoji["shortcuts"]: + for shortcut in (emoji.get("shortcuts") or []): text = text.replace(shortcut, emoji["id"]) money = message.get("money", {}) or {} chat_messages.append({