[WIP] Add Error Groupings and reduce walls of text #2405

Closed
wants to merge 8 commits into from
2 changes: 1 addition & 1 deletion PART
@@ -1 +1 @@
-
+3
20 changes: 10 additions & 10 deletions kometa.py
@@ -5,7 +5,7 @@
from modules.logs import MyLogger

if sys.version_info[0] != 3 or sys.version_info[1] < 9:
print("Python Version %s.%s.%s has been detected and is not supported. Kometa requires a minimum of Python 3.9.0." % (sys.version_info[0], sys.version_info[1], sys.version_info[2]))
print("[C0001] Python Version %s.%s.%s has been detected and is not supported. Kometa requires a minimum of Python 3.9.0." % (sys.version_info[0], sys.version_info[1], sys.version_info[2]))
sys.exit(0)

try:
@@ -16,7 +16,7 @@
from plexapi.exceptions import NotFound
from plexapi.video import Show, Season
except (ModuleNotFoundError, ImportError) as ie:
print(f"Requirements Error: Requirements are not installed.\nPlease follow the documentation for instructions on installing requirements. ({ie})")
print(f"[C0002] Requirements Error: Requirements are not installed.\nPlease follow the documentation for instructions on installing requirements. ({ie})")
sys.exit(0)

system_versions = {
@@ -188,10 +188,11 @@ def get_env(env_str, default, arg_bool=False, arg_int=False):
print(f"Argument Error: width argument invalid: {run_args['width']} must be an integer between 90 and 300. Using the default value of 100")
run_args["width"] = 100


if run_args["config"] and os.path.exists(run_args["config"]):
default_dir = os.path.join(os.path.dirname(os.path.abspath(run_args["config"])))
elif run_args["config"] and not os.path.exists(run_args["config"]):
print(f"Config Error: Configuration file (config.yml) not found at {os.path.abspath(run_args['config'])}")
print(f"[CFE0001] Config Error: Configuration file (config.yml) not found at {os.path.abspath(run_args['config'])}")
sys.exit(0)
elif not os.path.exists(os.path.join(default_dir, "config.yml")):
git_branch = git_branch or "master"
@@ -207,10 +208,9 @@ def get_env(env_str, default, arg_bool=False, arg_int=False):
else:
raise requests.RequestException
except requests.RequestException as e:
print(f"Config Error: Unable to download the configuration file from GitHub (URL: {github_url}'). Please save it as '{config_path}' before running Kometa again.")
print(f"[CFE0005] Config Error: Unable to download the configuration file from GitHub (URL: {github_url}'). Please save it as '{config_path}' before running Kometa again.")
sys.exit(1)


logger = MyLogger("Kometa", default_dir, run_args["width"], run_args["divider"][0], run_args["ignore-ghost"],
run_args["tests"] or run_args["debug"], run_args["trace"], run_args["log-requests"])

@@ -306,7 +306,7 @@ def start(attrs):
if sys_ver and sys_ver != required_versions[req_name]:
logger.info(f" {req_name} version: {sys_ver} requires an update to: {required_versions[req_name]}")
except FileNotFoundError:
logger.error(" File Error: requirements.txt not found")
logger.error("[E0001] File Error: requirements.txt not found")
if "time" in attrs and attrs["time"]: start_type = f"{attrs['time']} "
elif run_args["tests"]: start_type = "Test "
elif "collections" in attrs and attrs["collections"]: start_type = "Collections "
@@ -363,7 +363,7 @@ def start(attrs):
config.Webhooks.end_time_hooks(start_time, end_time, run_time, stats)
except Failed as e:
logger.stacktrace()
logger.error(f"Webhooks Error: {e}")
logger.error(f"[E0002] Webhooks Error: {e}")
version_line = f"Version: {my_requests.local}"
if my_requests.newest:
version_line = f"{version_line} Newest Version: {my_requests.newest}"
@@ -659,7 +659,7 @@ def run_libraries(config):
library.delete(collection)
logger.info(f"Collection {collection.title} Deleted")
except Failed as e:
-logger.error(e)
+logger.error(f"[E0003] {collection.title} | Collection could not be removed")
library_status[library.name]["All Collections Deleted"] = str(datetime.now() - time_start).split('.')[0]

if run_args["delete-labels"] and not run_args["playlists-only"]:
@@ -679,7 +679,7 @@ def run_libraries(config):
sync = ["Overlay"] if "Overlay" in [lbl.tag for lbl in item.labels] else []
library.edit_tags("label", item, sync_tags=sync)
except NotFound:
logger.error(f"{item.title[:25]:<25} | Labels Failed to be Removed")
logger.error(f"[E0004] {item.title[:25]:<25} | Labels Failed to be Removed")
library_status[library.name]["All Labels Deleted"] = str(datetime.now() - time_start).split('.')[0]

time_start = datetime.now()
@@ -1180,7 +1180,7 @@ def run_playlists(config):
time_str += f"{minutes} Minute{'s' if minutes > 1 else ''}"
logger.ghost(f"Current Time: {current_time} | {time_str} until the next run at {og_time_str} | Runs: {', '.join(valid_times)}")
else:
logger.error(f"Time Error: {valid_times}")
logger.error(f"[E0005] Time Error: {valid_times}")
time.sleep(60)
except KeyboardInterrupt:
logger.separator("Exiting Kometa")
12 changes: 6 additions & 6 deletions modules/anidb.py
@@ -55,7 +55,7 @@ def _parse(attr, xpath, is_list=False, is_dict=False, is_int=False, is_float=Fal
except (ValueError, TypeError):
pass
if fail:
raise Failed(f"AniDB Error: No Anime Found for AniDB ID: {self.anidb_id}")
raise Failed(f"[MCE0001] AniDB Error: No Anime Found for AniDB ID: {self.anidb_id}")
elif is_list:
return []
elif is_dict:
@@ -118,7 +118,7 @@ def authorize(self, client, version, expiration):
self.version = None
if self.cache:
self.cache.update_testing("anidb_login", self.client, self.version, "False")
-raise
+raise Failed("[CFE0002] Connector Error: AniDB Client/Version could not be verified. Please check these are correct. AniDB Library Operations will not function until this is resolved.") from e

@property
def is_authorized(self):
@@ -129,7 +129,7 @@ def login(self, username, password):
logger.secret(password)
data = {"show": "main", "xuser": username, "xpass": password, "xdoautologin": "on"}
if not self._request(urls["login"], data=data).xpath("//li[@class='sub-menu my']/@title"):
raise Failed("AniDB Error: Login failed")
raise Failed("[CFE0003] Connector Error: AniDB Username/Password could not be verified. Please verify that the username and password are correct. AniDB Builders will still work but Mature content will not be reachable until this is resolved.")
self.username = username
self.password = password

@@ -155,7 +155,7 @@ def _validate(self, anidb_id):
ids = response.xpath(f"//*[text()='a{anidb_id}']/text()")
if len(ids) > 0:
return util.regex_first_int(ids[0], "AniDB ID")
raise Failed(f"AniDB Error: AniDB ID: {anidb_id} not found")
raise Failed(f"[MCE0002] AniDB Error: AniDB ID: {anidb_id} not found")

def validate_anidb_ids(self, anidb_ids):
anidb_list = util.get_int_list(anidb_ids, "AniDB ID")
@@ -167,7 +167,7 @@ def validate_anidb_ids(self, anidb_ids):
logger.error(e)
if len(anidb_values) > 0:
return anidb_values
raise Failed(f"AniDB Error: No valid AniDB IDs in {anidb_list}")
raise Failed(f"[BLE0001] AniDB Error: No valid AniDB IDs in {anidb_list}")

def _tag(self, tag, limit):
anidb_ids = []
@@ -220,7 +220,7 @@ def get_anidb_ids(self, method, data):
logger.info(f"Processing AniDB Relation: {data}")
anidb_ids.extend(self._relations(data))
else:
raise Failed(f"AniDB Error: Method {method} not supported")
raise Failed(f"[BLE0002] AniDB Error: Method {method} not supported")
logger.debug("")
logger.debug(f"{len(anidb_ids)} AniDB IDs Found")
logger.trace(f"IDs: {anidb_ids}")
16 changes: 8 additions & 8 deletions modules/anilist.py
@@ -88,7 +88,7 @@ def _request(self, query, variables, level=1):
time.sleep(wait_time if wait_time > 0 else 10)
if level < 6:
return self._request(query, variables, level=level + 1)
raise Failed(f"AniList Error: Connection Failed")
raise Failed(f"[CFE0004] AniList Error: Connection Failed")
else:
raise Failed(f"AniList Error: {json_obj['errors'][0]['message']}")
else:
@@ -100,7 +100,7 @@ def _validate_id(self, anilist_id):
media = self._request(query, {"id": anilist_id})["data"]["Media"]
if media["id"]:
return media["id"], media["title"]["english" if media["title"]["english"] else "romaji"]
raise Failed(f"AniList Error: No AniList ID found for {anilist_id}")
raise Failed(f"[BLE0003] AniList Error: No AniList ID found for {anilist_id}")

def _pagenation(self, query, limit=0, variables=None):
anilist_ids = []
@@ -255,13 +255,13 @@ def validate_userlist(self, data):
variables = {"user": data["username"]}
json_obj = self._request(query, variables)
if not json_obj["data"]["MediaListCollection"]:
raise Failed(f"AniList Error: User: {data['username']} not found")
raise Failed(f"[BLE0007] AniList Error: User: {data['username']} not found")
list_names = [n["name"] for n in json_obj["data"]["MediaListCollection"]["lists"]]
if not list_names:
raise Failed(f"AniList Error: User: {data['username']} has no Lists")
raise Failed(f"[BLE0008] AniList Error: User: {data['username']} has no Lists")
if data["list_name"] in list_names:
return data
raise Failed(f"AniList Error: List: {data['list_name']} not found\nOptions: {', '.join(list_names)}")
raise Failed(f"[BLE0004] AniList Error: List: {data['list_name']} not found\nOptions: {', '.join(list_names)}")

def validate(self, name, data):
valid = []
@@ -270,7 +270,7 @@ def validate(self, name, data):
valid.append(d)
if len(valid) > 0:
return valid
raise Failed(f"AniList Error: {name}: {data} does not exist\nOptions: {', '.join([v for k, v in self.options[name].items()])}")
raise Failed(f"[BLE0005] AniList Error: {name}: {data} does not exist\nOptions: {', '.join([v for k, v in self.options[name].items()])}")

def validate_anilist_ids(self, anilist_ids, studio=False):
anilist_id_list = util.get_int_list(anilist_ids, "AniList ID")
@@ -283,7 +283,7 @@ def validate_anilist_ids(self, anilist_ids, studio=False):
except Failed as e: logger.error(e)
if len(anilist_values) > 0:
return anilist_values
raise Failed(f"AniList Error: No valid AniList IDs in {anilist_ids}")
raise Failed(f"[BLE0006] AniList Error: No valid AniList IDs in {anilist_ids}")

def get_anilist_ids(self, method, data):
if method == "anilist_id":
@@ -307,7 +307,7 @@ def get_anilist_ids(self, method, data):
elif method == "anilist_top_rated":
data = {"limit": data, "score.gt": 3, "sort_by": "score"}
elif method not in builders:
raise Failed(f"AniList Error: Method {method} not supported")
raise Failed(f"[BLE0009] AniList Error: Method {method} not supported")
message = f"Processing {method.replace('_', ' ').title().replace('Anilist', 'AniList')}:\n\tSort By {pretty_names[data['sort_by']]}"
if data['limit'] > 0:
message += f"\n\tLimit to {data['limit']} Anime"
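
The `_request` hunk above retries rate-limited AniList calls, sleeping between attempts and giving up with `[CFE0004]` once `level` reaches 6. A self-contained sketch of that bounded-retry shape, assuming hypothetical names (`fetch_with_retry`, `MAX_ATTEMPTS`) in place of the real method:

```python
import time

import requests

MAX_ATTEMPTS = 6  # mirrors the `level < 6` bound in the hunk above

def fetch_with_retry(url: str, payload: dict, attempt: int = 1) -> dict:
    """POST a GraphQL payload, retrying on HTTP 429 up to MAX_ATTEMPTS times."""
    response = requests.post(url, json=payload)
    if response.status_code == 429:
        # Honor Retry-After when present, otherwise back off 10 seconds,
        # matching the wait-or-10s behavior in the diff.
        wait = int(response.headers.get("Retry-After", 10))
        time.sleep(wait if wait > 0 else 10)
        if attempt < MAX_ATTEMPTS:
            return fetch_with_retry(url, payload, attempt + 1)
        raise ConnectionError("[CFE0004] AniList Error: Connection Failed")
    return response.json()
```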
2 changes: 1 addition & 1 deletion modules/github.py
@@ -108,4 +108,4 @@ def translation_yaml(self, translation_key):
if k in yaml:
output[k] = yaml[k]
self._translations[translation_key] = output
-return self._translations[translation_key]
+return self._translations[translation_key]
6 changes: 3 additions & 3 deletions modules/icheckmovies.py
@@ -27,16 +27,16 @@ def validate_icheckmovies_lists(self, icheckmovies_lists, language):
for icheckmovies_list in util.get_list(icheckmovies_lists, split=False):
list_url = icheckmovies_list.strip()
if not list_url.startswith(base_url):
raise Failed(f"ICheckMovies Error: {list_url} must begin with: {base_url}")
raise Failed(f"[BLE0041] ICheckMovies Error: {list_url} must begin with: {base_url}")
elif len(self._parse_list(list_url, language)) > 0:
valid_lists.append(list_url)
else:
raise Failed(f"ICheckMovies Error: {list_url} failed to parse")
raise Failed(f"[BLE0042] ICheckMovies Error: {list_url} failed to parse")
return valid_lists

def get_imdb_ids(self, method, data, language):
if method == "icheckmovies_list":
logger.info(f"Processing ICheckMovies List: {data}")
return self._parse_list(data, language)
else:
raise Failed(f"ICheckMovies Error: Method {method} not supported")
raise Failed(f"[BLE0043] ICheckMovies Error: Method {method} not supported")
28 changes: 14 additions & 14 deletions modules/imdb.py
@@ -216,19 +216,19 @@ def validate_imdb(self, err_type, method, imdb_dicts):
imdb_dict[main] = imdb_dict["url"]
dict_methods = {dm.lower(): dm for dm in imdb_dict}
if main not in dict_methods:
raise Failed(f"{err_type} Error: {method} {main} attribute not found")
raise Failed(f"[BLE0010] IMDb {err_type} Error: {method} {main} attribute not found")
elif imdb_dict[dict_methods[main]] is None:
raise Failed(f"{err_type} Error: {method} {main} attribute is blank")
raise Failed(f"[BLE0011] IMDb {err_type} Error: {method} {main} attribute is blank")
else:
main_data = imdb_dict[dict_methods[main]].strip()
if method == "imdb_list":
if main_data.startswith(f"{base_url}/search/"):
raise Failed(f"IMDb Error: URLs with https://www.imdb.com/search/ no longer works with {method} use imdb_search.")
raise Failed(f"[BLE0012] IMDb Error: URLs with https://www.imdb.com/search/ no longer works with {method} use imdb_search.")
if main_data.startswith(f"{base_url}/filmosearch/"):
raise Failed(f"IMDb Error: URLs with https://www.imdb.com/filmosearch/ no longer works with {method} use imdb_search.")
raise Failed(f"[BLE0013] IMDb Error: URLs with https://www.imdb.com/filmosearch/ no longer works with {method} use imdb_search.")
search = re.search(r"(ls\d+)", main_data)
if not search:
raise Failed(f"IMDb Error: {method} {main} must begin with ls (ex. ls005526372)")
raise Failed(f"[BLE0014] IMDb Error: {method} {main} must begin with ls (ex. ls005526372)")
new_dict = {main: search.group(1)}
else:
user_id = None
Expand All @@ -238,12 +238,12 @@ def validate_imdb(self, err_type, method, imdb_dicts):
except ValueError:
pass
if not user_id:
raise Failed(f"{err_type} Error: {method} {main}: {main_data} not in the format of 'ur########'")
raise Failed(f"[BLE0015] IMDb {err_type} Error: {method} {main}: {main_data} not in the format of 'ur########'")
new_dict = {main: main_data}

if "limit" in dict_methods:
if imdb_dict[dict_methods["limit"]] is None:
logger.warning(f"{err_type} Warning: {method} limit attribute is blank using 0 as default")
logger.warning(f"[BLW0001] IMDb {err_type} Warning: {method} limit attribute is blank using 0 as default")
else:
try:
value = int(str(imdb_dict[dict_methods["limit"]]))
Expand All @@ -252,7 +252,7 @@ def validate_imdb(self, err_type, method, imdb_dicts):
except ValueError:
pass
if "limit" not in new_dict:
logger.warning(f"{err_type} Warning: {method} limit attribute: {imdb_dict[dict_methods['limit']]} must be an integer 0 or greater using 0 as default")
logger.warning(f"[BLW0002] IMDb {err_type} Warning: {method} limit attribute: {imdb_dict[dict_methods['limit']]} must be an integer 0 or greater using 0 as default")
if "limit" not in new_dict:
new_dict["limit"] = 0

@@ -447,10 +447,10 @@ def _pagination(self, data, list_type):
logger.exorcise()
if len(imdb_ids) > 0:
return imdb_ids
raise Failed("IMDb Error: No IMDb IDs Found")
raise Failed("[BLE0016] IMDb Error: No IMDb IDs Found")
except KeyError:
if 'errors' in response_json.keys() and 'message' in response_json['errors'][0] and response_json['errors'][0]['message'] == 'PersistedQueryNotFound':
raise Failed("Internal IMDB PersistedQuery Error")
raise Failed("[BLE0017] IMDb Error: Internal PersistedQuery Error. Contact the Kometa Team.")
logger.error(f"Response: {response_json}")
raise

@@ -503,7 +503,7 @@ def keywords(self, imdb_id, language, ignore_cache=False):
return imdb_keywords
keywords = self._request(f"{base_url}/title/{imdb_id}/keywords", language=language, xpath="//td[@class='soda sodavote']")
if not keywords:
raise Failed(f"IMDb Error: No Item Found for IMDb ID: {imdb_id}")
raise Failed(f"[BLE0018] IMDb Error: No keywords found for IMDb ID: {imdb_id}")
for k in keywords:
name = k.xpath("div[@class='sodatext']/a/text()")[0]
relevant = k.xpath("div[@class='did-you-know-actions']/div/a/text()")[0].strip()
@@ -530,14 +530,14 @@ def parental_guide(self, imdb_id, ignore_cache=False):
if v not in parental_dict:
parental_dict[v] = None
else:
raise Failed(f"IMDb Error: No Parental Guide Found for IMDb ID: {imdb_id}")
raise Failed(f"[BLE0019] IMDb Error: No Parental Guide Found for IMDb ID: {imdb_id}")
if self.cache and not ignore_cache:
self.cache.update_imdb_parental(expired, imdb_id, parental_dict, self.cache.expiration)
return parental_dict

def _ids_from_chart(self, chart, language):
if chart not in chart_urls:
raise Failed(f"IMDb Error: chart: {chart} not ")
raise Failed(f"[BLE0020] IMDb Error: chart: {chart} not ")
script_data = self._request(f"{base_url}/{chart_urls[chart]}", language=language, xpath="//script[@id='__NEXT_DATA__']/text()")[0]
return [x.group(1) for x in re.finditer(r'"(tt\d+)"', script_data)]

@@ -571,7 +571,7 @@ def get_imdb_ids(self, method, data, language):
logger.info(f" {k}: {v}")
return [(_i, "imdb") for _i in self._pagination(data, "search")]
else:
raise Failed(f"IMDb Error: Method {method} not supported")
raise Failed(f"[BLE0021] IMDb Error: Method {method} not supported")

def _interface(self, interface):
gz = os.path.join(self.default_dir, f"title.{interface}.tsv.gz")
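
Because every rewritten message now leads with a bracketed code, log output becomes easy to group mechanically. A small sketch of tallying codes from a run's log; the regex covers the prefixes seen in this diff, and the `meta.log` filename is an assumption:

```python
import re
from collections import Counter

# Matches the code families introduced in this PR: C, CFE, E, MCE, BLE, BLW.
CODE_PATTERN = re.compile(r"\[((?:CFE|MCE|BLE|BLW|C|E)\d{4})\]")

def count_error_codes(log_path: str) -> Counter:
    """Tally occurrences of each bracketed error code in a log file."""
    counts = Counter()
    with open(log_path, encoding="utf-8") as handle:
        for line in handle:
            match = CODE_PATTERN.search(line)
            if match:
                counts[match.group(1)] += 1
    return counts

# Example usage: show the five most frequent codes from a run.
# for code, total in count_error_codes("meta.log").most_common(5):
#     print(f"{code}: {total}")
```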