diff --git a/requirements.txt b/requirements.txt
index 5e1f809..91fac51 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -9,4 +9,6 @@
 uvicorn[standard]~=0.29.0
 cryptography
 DateTime~=5.5
-loguru~=0.7.2
\ No newline at end of file
+loguru~=0.7.2
+
+python-amazon-sp-api
\ No newline at end of file
diff --git a/src/etsy_api/__init__.py b/src/amazon_api/__init__.py
similarity index 100%
rename from src/etsy_api/__init__.py
rename to src/amazon_api/__init__.py
diff --git a/src/amazon_api/get_amazon_api.py b/src/amazon_api/get_amazon_api.py
new file mode 100644
index 0000000..b3e7619
--- /dev/null
+++ b/src/amazon_api/get_amazon_api.py
@@ -0,0 +1,65 @@
+from sp_api.api import Orders
+from sp_api.util import throttle_retry, load_all_pages
+from sp_api.base import ApiResponse
+
+import pprint
+
+from constants.amazon_credentials import CREDENTIALS_ARG
+from constants.status import ParserStatus
+from utils.safe_ratelimit_amazon import safe_rate_limit
+from utils.format_order_data import format_order_data
+from schemes.shop_data import ShopData
+from schemes.upload_order import OrderData
+from api.parser import update_parser_status_by_id
+from log.logger import logger
+
+
+class OrderClient:
+    def __init__(self, shop: ShopData):
+        self.order_api = Orders(**CREDENTIALS_ARG)
+        self._list_orders_data = []
+        self.shop = shop
+
+
+    def _get_all_items(self, order_id) -> list:
+        items = []
+        for i_page in self._load_all_items(order_id=order_id):
+            for item in i_page.payload.get("OrderItems"):
+                items.append(item)
+        return items
+
+
+    @throttle_retry()
+    @load_all_pages()
+    def load_all_orders(self, **kwargs):
+        return self.order_api.get_orders(**kwargs)
+
+
+    @throttle_retry()
+    @load_all_pages()
+    @safe_rate_limit(header_limit=True)
+    def _load_all_items(self, order_id, **kwargs):
+        return self.order_api.get_order_items(order_id=order_id, **kwargs)
+
+
+    def get_orders_with_items(self, page: ApiResponse) -> list[OrderData] | None:
+        # reset per page so orders from earlier pages are not uploaded twice
+        self._list_orders_data = []
+        try:
+            for order in page.payload.get('Orders'):
+                _order_id = order["AmazonOrderId"]
+                logger.info(f"formatting order ID: {_order_id}")
+                order_data = format_order_data(
+                    order=order,
+                    items=self._get_all_items(order_id=_order_id)
+                )
+                self._list_orders_data.append(order_data)
+        except Exception as e:
+            logger.critical(f"Error getting data from the Amazon SP API: {e}")
+            pprint.pprint(e)
+            update_parser_status_by_id(
+                parser_id=self.shop.parser_id,
+                status=ParserStatus.OK_AND_WAIT
+            )
+            return None
+
+        return self._list_orders_data
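A note on the new OrderClient: sp_api's throttle_retry() backs off and retries when Amazon throttles a call, and load_all_pages() turns the decorated method into a generator that follows NextToken, so load_all_orders is consumed with a plain for loop. A minimal sketch of the intended call pattern — the ShopData values here are invented, and real SP-API credentials must be present in settings:

```python
from amazon_api.get_amazon_api import OrderClient
from constants.amazon_dates import LAST_WEEK_DATE
from schemes.shop_data import ShopData

# hypothetical shop record; real ones come from shops_amazon.json
shop = ShopData(parser_id=1, shop_id=1, shop_name="demo", amazon_shop_id="A1XXXXXXXX")
client = OrderClient(shop=shop)

# each iteration yields one ApiResponse page of up to 100 orders
for page in client.load_all_orders(CreatedAfter=LAST_WEEK_DATE):
    orders = client.get_orders_with_items(page=page)
    if orders is None:
        break  # the client already logged the failure and reset the parser status
    print(f"page contained {len(orders)} orders")
```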
diff --git a/src/api/auth.py b/src/api/auth.py
index 5920058..ef36f62 100644
--- a/src/api/auth.py
+++ b/src/api/auth.py
@@ -1,8 +1,10 @@
-from configs.env import API_AUTH_TOKEN
+from configs import settings
 from schemes.auth import Auth
 
 
 def authorization() -> Auth:
     return Auth(
-        Authorization=f"Bearer {API_AUTH_TOKEN}"
+        Authorization=f"Bearer {settings.API_AUTH_TOKEN}"
     )
+
+
diff --git a/src/api/order.py b/src/api/order.py
index 159db23..313a497 100644
--- a/src/api/order.py
+++ b/src/api/order.py
@@ -1,23 +1,29 @@
 import requests as req
-from loguru import logger as log
 
 from api.auth import authorization
-from configs.env import API_URL
+from configs import settings
 from schemes.upload_order import UploadingOrderData
+from log.logger import logger
+from utils.retry import retry
 
 
-def upload_orders_data(orders: UploadingOrderData) -> bool:
+@retry()
+def upload_orders_data(orders: UploadingOrderData):
+    logger.info("Posting data to backend...")
     response = req.post(
-        f"{API_URL}/parser/orders/upload/",
+        url=settings.PARSER_ORDER_UPLOAD_URL,
         headers=authorization().model_dump(),
         json=orders.model_dump(),
     )
     if response.status_code != 200:
-        log.error(
+        logger.error(
             f"""
             Some error when uploading orders data,
             status code: {response.status_code}
             """
         )
-        return False
-    return True
+        response.raise_for_status()
+
+
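upload_orders_data no longer returns a bool: a non-200 response is logged and then escalated via response.raise_for_status(), and the @retry() decorator (added in src/utils/retry.py further down) re-invokes the call up to ten times before letting the exception propagate. A rough sketch of the equivalent control flow, with a hypothetical endpoint:

```python
import requests

def upload(payload: dict) -> None:
    response = requests.post("https://backend.example/upload", json=payload)  # hypothetical URL
    response.raise_for_status()  # raises requests.HTTPError on 4xx/5xx

# roughly what @retry() does around upload_orders_data
for attempt in range(1, 11):
    try:
        upload({"orders": []})
        break
    except requests.RequestException as exc:
        if attempt == 10:
            raise
        print(f"attempt {attempt} failed ({exc}); retrying")
```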
diff --git a/src/api/parser.py b/src/api/parser.py
index 03b2e15..e3e08de 100644
--- a/src/api/parser.py
+++ b/src/api/parser.py
@@ -1,31 +1,41 @@
 import requests as req
-from loguru import logger as log
+from log.logger import logger
 
 from api.auth import authorization
-from configs.env import API_URL
+from configs import settings
 
 
 def update_parser_status_by_id(
-    parser_id: int, status: int, last_parsed: float | None = None
+    parser_id: int,
+    status: int,
+    last_parsed: float | None = None
 ):
     data = {
         "id": parser_id,
         "status": status,
     }
+
     if last_parsed:
         data.update({"last_parsed": last_parsed})
+
     try:
         response = req.put(
-            f"{API_URL}/parser/", headers=authorization().model_dump(), json=data
+            url=settings.PARSER_STATUS_URL,
+            headers=authorization().model_dump(),
+            json=data
        )
-    except Exception as e:
-        return update_parser_status_by_id(parser_id, status)
-    if response.status_code != 200:
-        log.error(
+
+        if response.status_code != 200:
+            raise ValueError(f"Unexpected status code: {response.status_code}")
+
+    except ValueError:
+        logger.error(
             f"""
-            Some error when updating parser status.
-            Parser ID: {parser_id}
-            Status code: {response.status_code}
-            Details: {response.json()['detail']}
-            """
+            Error updating parser status.
+            Parser ID: {parser_id}
+            Status code: {response.status_code}
+            Details: {response.json()['detail']}
+            """
         )
+    except Exception as e:
+        logger.error(f"Something went wrong while updating parser status: {e}")
diff --git a/src/auth_code_endpoint.py b/src/auth_code_endpoint.py
deleted file mode 100644
index aa888d2..0000000
--- a/src/auth_code_endpoint.py
+++ /dev/null
@@ -1,19 +0,0 @@
-import json
-
-import uvicorn
-from constants.auth_code import AUTH_CODE_RESPONSE_FILE_PATH
-from fastapi import FastAPI, Response
-
-app = FastAPI()
-
-
-@app.get("/auth_code")
-def get_auth_code(code: str, state: str):
-    with open(AUTH_CODE_RESPONSE_FILE_PATH, 'w') as f:
-        json.dump({"code": code}, f)
-
-    return Response(status_code=200)
-
-
-if __name__ == '__main__':
-    uvicorn.run(app, host="0.0.0.0", port=8000)
diff --git a/src/configs/__init__.py b/src/configs/__init__.py
index e69de29..f4cc55b 100644
--- a/src/configs/__init__.py
+++ b/src/configs/__init__.py
@@ -0,0 +1,5 @@
+from .env import Settings
+
+
+settings = Settings()  # type: ignore
+
diff --git a/src/configs/env.py b/src/configs/env.py
index 685a55f..87b2c70 100644
--- a/src/configs/env.py
+++ b/src/configs/env.py
@@ -1,18 +1,28 @@
-import os
+from pydantic_settings import BaseSettings, SettingsConfigDict
 
-from dotenv import load_dotenv
 
-load_dotenv()
+class Settings(BaseSettings):
+    model_config = SettingsConfigDict(env_file=".env")
 
-ETSY_API_KEY = os.getenv('ETSY_API_KEY')
-ETSY_API_SHARED_SECRET = os.getenv('ETSY_API_SHARED_SECRET')
+    LWA_APP_ID: str
+    LWA_CLIENT_SECRET: str
+    SP_API_REFRESH_TOKEN: str
 
-ETSY_API_REDIRECT_URL = os.getenv('ETSY_API_REDIRECT_URL')
+    API_URL: str
+    API_AUTH_TOKEN: str
 
-CODE_VERIFIER = os.getenv('CODE_VERIFIER')
+    LOG_FILE: str
+    DATA_FOLDER_PATH: str
 
-API_URL = os.getenv('API_URL')
-API_AUTH_TOKEN = os.getenv("API_AUTH_TOKEN")
+    @property
+    def SHOPS_DATA_FILE_PATH(self):
+        return f"{self.DATA_FOLDER_PATH}/shops/shops_amazon.json"
 
-DATA_FOLDER_PATH = os.getenv('DATA_FOLDER_PATH')
-LOG_FILE = os.getenv("LOG_FILE")
+
+    @property
+    def PARSER_ORDER_UPLOAD_URL(self):
+        return f"{self.API_URL}/parser/orders/upload/amazon"
+
+    @property
+    def PARSER_STATUS_URL(self):
+        return f"{self.API_URL}/parser/"
\ No newline at end of file
diff --git a/src/constants/amazon_credentials.py b/src/constants/amazon_credentials.py
new file mode 100644
index 0000000..73e09e8
--- /dev/null
+++ b/src/constants/amazon_credentials.py
@@ -0,0 +1,9 @@
+from configs import settings
+
+CREDENTIALS_ARG = dict(
+    refresh_token=settings.SP_API_REFRESH_TOKEN,
+    credentials=dict(
+        lwa_app_id=settings.LWA_APP_ID,
+        lwa_client_secret=settings.LWA_CLIENT_SECRET
+    )
+)
diff --git a/src/constants/amazon_dates.py b/src/constants/amazon_dates.py
new file mode 100644
index 0000000..a255d45
--- /dev/null
+++ b/src/constants/amazon_dates.py
@@ -0,0 +1,15 @@
+from datetime import datetime, timezone, timedelta
+
+# WARNING: the SP-API expects ISO 8601 timestamps that end with 'Z'
+
+# the first order was placed on 2024-02-17
+EARLIEST_DATE = datetime(2024, 1, 1, tzinfo=timezone.utc).isoformat().replace("+00:00", 'Z')
+
+# current date
+CURRENT_DATE = datetime.now(timezone.utc).isoformat().replace('+00:00', 'Z')
+
+# 30 days ago
+LAST_MONTH_DATE = (datetime.now(timezone.utc) - timedelta(30)).isoformat().replace("+00:00", "Z")
+
+# 7 days ago
+LAST_WEEK_DATE = (datetime.now(timezone.utc) - timedelta(7)).isoformat().replace("+00:00", "Z")
\ No newline at end of file
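The .replace("+00:00", "Z") calls exist because datetime.isoformat() renders UTC as a +00:00 offset, while these constants are meant to carry the Z suffix. A quick illustration of the resulting values:

```python
from datetime import datetime, timezone

stamp = datetime(2024, 1, 1, tzinfo=timezone.utc).isoformat()
print(stamp)                         # 2024-01-01T00:00:00+00:00
print(stamp.replace("+00:00", "Z"))  # 2024-01-01T00:00:00Z — the form EARLIEST_DATE holds
```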
diff --git a/src/constants/commands.py b/src/constants/commands.py
deleted file mode 100644
index c07450c..0000000
--- a/src/constants/commands.py
+++ /dev/null
@@ -1,4 +0,0 @@
-class ParserCommand:
-    NO_COMMAND = 0
-    PARSE_NOW = 1
-    UPDATE_ETSY_COOKIE = 2
diff --git a/src/constants/etsy_oauth.py b/src/constants/etsy_oauth.py
deleted file mode 100644
index 3bef3a7..0000000
--- a/src/constants/etsy_oauth.py
+++ /dev/null
@@ -1,27 +0,0 @@
-from etsyv3.util.auth.auth_helper import AuthHelper
-
-from configs.env import ETSY_API_KEY, ETSY_API_REDIRECT_URL, CODE_VERIFIER
-
-ETSY_OAUTH_CONNECT_URL = "https://www.etsy.com/oauth/connect"
-ETSY_OAUTH_TOKEN_URL = "https://api.etsy.com/v3/public/oauth/token"
-
-SCOPES = [
-    "listings_r",
-    "transactions_r",
-    "billing_r",
-    "shops_r",
-    "address_r",
-    "profile_r",
-    "feedback_r",
-    "recommend_r",
-]
-
-STATE = "superstate"
-
-etsy_auth = AuthHelper(
-    keystring=ETSY_API_KEY,
-    redirect_uri=ETSY_API_REDIRECT_URL,
-    scopes=SCOPES,
-    code_verifier=CODE_VERIFIER,
-    state=STATE,
-)
diff --git a/src/constants/files_paths.py b/src/constants/files_paths.py
deleted file mode 100644
index 7e16aff..0000000
--- a/src/constants/files_paths.py
+++ /dev/null
@@ -1,3 +0,0 @@
-from configs.env import DATA_FOLDER_PATH
-
-SHOPS_DATA_FILE_PATH = f"{DATA_FOLDER_PATH}/shops/shops.json"
diff --git a/src/constants/shops_names.py b/src/constants/shops_names.py
deleted file mode 100644
index 0fdc7dc..0000000
--- a/src/constants/shops_names.py
+++ /dev/null
@@ -1,5 +0,0 @@
-class ShopName:
-    NIKO = "NiKoEngraving"
-    ALDA = "AldaProduction"
-    DADA = "DaDaTeamEngraving"
-    ELMA = "ElmaVadaStudio"
diff --git a/src/constants/status.py b/src/constants/status.py
index a1ee38f..2df8256 100644
--- a/src/constants/status.py
+++ b/src/constants/status.py
@@ -3,3 +3,43 @@ class ParserStatus:
     PARSING = 1
     COOKIE_EXPIRED = 2
     ETSY_API_ERROR = 3
+
+
+class OrderStatus:
+    Paid = "Paid"
+    Canceled = "Canceled"
+    Completed = "Completed"
+    PartiallyRefunded = "Partially Refunded"
+    FullyRefunded = "Fully Refunded"
+
+    @classmethod
+    def get_backend_status(cls, amazon_status):
+        """
+        Amazon order statuses (paraphrased from the SP-API docs):
+
+        PendingAvailability — Pre-orders only. The order has been placed, payment has not been authorized, and the release date of the product is in the future.
+
+        Pending — The order has been placed, but payment has not yet been authorized. The order is not ready for shipment.
+
+        Unshipped — Payment is authorized and the order is ready for shipment, but no items have been shipped yet.
+
+        PartiallyShipped — One or more items have been shipped, but not all.
+
+        Shipped — All items in the order have been shipped.
+
+        InvoiceUnconfirmed — All items have been shipped, but the seller has not yet confirmed to Amazon that the invoice has been sent to the buyer.
+
+        Canceled — The order has been canceled.
+
+        Unfulfillable — The order cannot be fulfilled. This status applies only to orders fulfilled by Amazon that were not placed on the Amazon retail website.
+        """
+        mapping = {
+            "Pending": cls.Paid,
+            "Unshipped": cls.Paid,
+            "PendingAvailability": cls.Paid,
+            "PartiallyShipped": cls.Completed,
+            "Shipped": cls.Completed,
+            "InvoiceUnconfirmed": cls.Paid,
+            "Canceled": cls.Canceled,
+            "Unfulfillable": cls.Canceled
+        }
+        return mapping.get(amazon_status)
\ No newline at end of file
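get_backend_status collapses Amazon's eight order states into three backend states, and mapping.get returns None for anything unrecognized, so callers see None rather than an exception on an unexpected state. For example:

```python
from constants.status import OrderStatus

print(OrderStatus.get_backend_status("Shipped"))   # Completed
print(OrderStatus.get_backend_status("Pending"))   # Paid
print(OrderStatus.get_backend_status("Refunded"))  # None — not an Amazon order state
```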
diff --git a/src/db_handler_main.py b/src/db_handler_main.py
deleted file mode 100644
index 4f82395..0000000
--- a/src/db_handler_main.py
+++ /dev/null
@@ -1,48 +0,0 @@
-import json
-import time
-
-from api.command_handler import get_parser_info, update_parser_command_to_default
-from configs.env import DATA_FOLDER_PATH
-from constants.commands import ParserCommand
-from schemes.parser_info import Parser
-
-
-def update_commands_json(command: int, parser_id: int):
-    with open(f"{DATA_FOLDER_PATH}/shops/commands.json", "r") as f:
-        data = json.load(f)
-
-    data[parser_id - 1]["command"] = command
-
-    with open(f"{DATA_FOLDER_PATH}/shops/commands.json", "w") as f:
-        json.dump(data, f)
-
-    print(data)
-
-
-def command_check(parser: Parser):
-    command = parser.command
-    if command == ParserCommand.NO_COMMAND:
-        return
-    update_commands_json(command, parser.id)
-    update_parser_command_to_default(parser.id)
-    # elif command == ParserCommand.PARSE_NOW:
-    #     pass
-    # elif command == ParserCommand.UPDATE_ETSY_COOKIE:
-    #     pass
-
-
-def main():
-    while True:
-        try:
-            with open(f"{DATA_FOLDER_PATH}/shops/shops.json", "r") as f:
-                data = json.load(f)
-        except Exception:
-            continue
-        for shop in data:
-            parser = get_parser_info(shop["parser_id"])
-            command_check(parser)
-        time.sleep(15)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/src/etsy_api/get_etsy_api.py b/src/etsy_api/get_etsy_api.py
deleted file mode 100644
index 3550de0..0000000
--- a/src/etsy_api/get_etsy_api.py
+++ /dev/null
@@ -1,154 +0,0 @@
-import json
-import time
-from datetime import datetime, timedelta
-
-from etsyv3 import EtsyAPI
-from loguru import logger as log
-from typing_extensions import deprecated
-
-from configs.env import ETSY_API_KEY
-from constants.etsy_oauth import etsy_auth, STATE
-from constants.files_paths import SHOPS_DATA_FILE_PATH
-from schemes.access_token import AuthToken
-from schemes.auth import AuthCode
-from schemes.shop_data import ShopData
-
-AUTH_CODE_WAIT_TIME_IN_SECONDS = 15
-AUTH_TOKEN_LIFE_TIME_IN_SECONDS = 3600
-
-
-class SouvTechEtsyAPI(EtsyAPI):
-    shop_id: int
-
-    def refresh(self) -> tuple[str, str, datetime]:
-        log.info(f"Custom refreshing Etsy access token..")
-        data = {
-            "grant_type": "refresh_token",
-            "client_id": self.keystring,
-            "refresh_token": self.refresh_token,
-        }
-        del self.session.headers["Authorization"]
-        r = self.session.post("https://api.etsy.com/v3/public/oauth/token", data=data)
-        log.info(f"Refresh token status code: {r.status_code}")
-        refreshed = r.json()
-        log.info(f"Refresh token response: {refreshed}")
-        self.token = refreshed["access_token"]
-        self.refresh_token = refreshed["refresh_token"]
-        tmp_expiry = datetime.utcnow() + timedelta(seconds=refreshed["expires_in"])
-        self.expiry = tmp_expiry
-        self.session.headers["Authorization"] = "Bearer " + self.token
-        if self.refresh_save is not None:
-            self.refresh_save(self.token, self.refresh_token, self.expiry)
-
-        # Update access token info in config
-        log.success(f"New access token: {self.token}")
-        log.success(f"New refresh token: {self.refresh_token}")
-        log.success(f"New expiry: {self.expiry}")
-
-        new_auth_token = AuthToken(
-            access_token=self.token,
-            refresh_token=self.refresh_token,
-            expires_at=self.expiry.timestamp(),
-        )
-        _save_auth_token(new_auth_token, self.shop_id)
-        #########
-
-        return self.token, self.refresh_token, self.expiry
-
-
-def _get_shop_data_by_id(shop_id: int) -> ShopData:
-    with open(SHOPS_DATA_FILE_PATH) as f:
-        shops_data = [ShopData(**shop) for shop in json.load(f)]
-    for shop in shops_data:
-        if shop.shop_id == shop_id:
-            return shop
-
-
-def _save_auth_token(auth_token: AuthToken, shop_id: int):
-    with open(SHOPS_DATA_FILE_PATH) as f:
-        shops_data = [ShopData(**shop_data) for shop_data in json.load(f)]
-    for shop in shops_data:
-        if shop.shop_id == shop_id:
-            shop.shop_token = auth_token.access_token
-            shop.shop_refresh_token = auth_token.refresh_token
-            shop.expiry = auth_token.expires_at
-    with open(SHOPS_DATA_FILE_PATH, 'w') as f:
-        json.dump([shop_data.model_dump() for shop_data in shops_data], f)
-
-
-def _get_auth_code(shop_id: int) -> AuthCode:
-    shop_data = _get_shop_data_by_id(shop_id)
-
-    auth_code_exists = shop_data.shop_auth_code.strip()
-    if not auth_code_exists:
-        auth_url, _ = etsy_auth.get_auth_code()
-        print(f"Auth code is not found. Open {auth_url} to grant access.")
-        time.sleep(AUTH_CODE_WAIT_TIME_IN_SECONDS)
-        return _get_auth_code(shop_id)
-
-    auth_code_response = AuthCode(code=shop_data.shop_auth_code)
-    return auth_code_response
-
-
-def _get_auth_token(shop_id: int) -> AuthToken:
-    shop_data = _get_shop_data_by_id(shop_id)
-
-    auth_token_exists = shop_data.shop_token.strip() and shop_data.shop_refresh_token.strip() and shop_data.expiry != 0
-    if not auth_token_exists:
-        auth_code_response = _get_auth_code(shop_id)
-        etsy_auth.set_authorisation_code(
-            code=auth_code_response.code,
-            state=STATE,
-        )
-        auth_token_response = etsy_auth.get_access_token()
-        log.info(f"Auth token response: {auth_token_response}")
-        auth_token = AuthToken(
-            access_token=auth_token_response['access_token'],
-            refresh_token=auth_token_response['refresh_token'],
-            expires_at=auth_token_response['expires_at'],
-        )
-        _save_auth_token(auth_token, shop_id)
-        return _get_auth_token(shop_id)
-
-    auth_token = AuthToken(
-        access_token=shop_data.shop_token,
-        refresh_token=shop_data.shop_refresh_token,
-        expires_at=shop_data.expiry,
-    )
-    return auth_token
-
-
-@deprecated("Token refresh inside EtsyAPI, not needed")
-def refresh_auth_token(etsy_api: SouvTechEtsyAPI, shop_id: int):
-    new_access_token, new_refresh_token, new_expires_at = etsy_api.refresh()
-    log.success(f"New access token: {new_access_token}")
-    log.success(f"New refresh token: {new_refresh_token}")
-    log.success(f"New expiry: {new_expires_at}")
-    new_auth_token = AuthToken(
-        access_token=new_access_token,
-        refresh_token=new_refresh_token,
-        expires_at=new_expires_at.timestamp(),
-    )
-    _save_auth_token(new_auth_token, shop_id)
-
-
-def get_etsy_api(shop_id: int):
-    auth_token = _get_auth_token(shop_id)
-
-    etsy_api = SouvTechEtsyAPI(
-        keystring=ETSY_API_KEY,
-        token=auth_token.access_token,
-        refresh_token=auth_token.refresh_token,
-        expiry=datetime.fromtimestamp(auth_token.expires_at),
-    )
-    etsy_api.shop_id = shop_id
-    time.sleep(3)
-    # try:
-    #     etsy_api.ping()
-    # except Unauthorised:
-    #     log.warning(f"Token is expired. Requesting new token.")
-    #     time.sleep(10)
-    #     refresh_auth_token(etsy_api, shop_id)
-    #     return get_etsy_api(shop_id)
-
-    return etsy_api
diff --git a/src/etsy_api/orders.py b/src/etsy_api/orders.py
deleted file mode 100644
index 32d76b8..0000000
--- a/src/etsy_api/orders.py
+++ /dev/null
@@ -1,13 +0,0 @@
-from etsy_api.get_etsy_api import get_etsy_api
-
-
-def get_all_orders_by_shop_id(etsy_shop_id: int, shop_id: int, limit: int = 100, offset: int = 0):
-    etsy_api = get_etsy_api(shop_id)
-    orders = etsy_api.get_shop_receipts(
-        shop_id=etsy_shop_id,
-        was_paid=None,
-        was_shipped=None,
-        limit=limit,
-        offset=offset,
-    )
-    return orders['results'], orders['count']
diff --git a/src/etsy_api/shops.py b/src/etsy_api/shops.py
deleted file mode 100644
index c696933..0000000
--- a/src/etsy_api/shops.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from constants.shops_names import ShopName
-from etsy_api.get_etsy_api import get_etsy_api
-
-
-def find_shop_by_name(shop_name: str, shop_id: int):
-    etsy_api = get_etsy_api(shop_id)
-    shop = etsy_api.find_shops(shop_name)
-    return shop
-
-
-if __name__ == "__main__":
-    shop_name = ShopName.ALDA
-    shop = find_shop_by_name(shop_name, shop_id=2)
-    print(shop)
-    # with open(f"{shop_name}.json", 'w') as f:
-    #     json.dump(shop, f)
-
-    # shop_id = 50508356
-    # shop = get_shop_by_id(shop_id)
-    # print(shop)
diff --git a/src/log/logger.py b/src/log/logger.py
new file mode 100644
index 0000000..a037a17
--- /dev/null
+++ b/src/log/logger.py
@@ -0,0 +1,14 @@
+from loguru import logger
+from configs import settings
+
+logger.add(
+    settings.LOG_FILE,
+    format="{time} {level} {message}",
+    level="DEBUG",
+    rotation="100 MB",
+    compression="zip",
+    serialize=True,
+)
+
+
+__all__ = ["logger"]
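With serialize=True, loguru writes each record to LOG_FILE as a JSON object (the plain-text format string still applies to the default stderr sink), so the rotated and zipped logs stay machine-parseable. Since loguru's logger is a process-wide singleton, importing the configured instance is all a module needs:

```python
from log.logger import logger

logger.info("parser started")     # plain text on stderr, JSON in LOG_FILE
logger.success("shop 42 parsed")  # loguru's SUCCESS level, used throughout this PR
```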
diff --git a/src/parser.py b/src/parser.py
index bc48206..ca6a45c 100644
--- a/src/parser.py
+++ b/src/parser.py
@@ -1,110 +1,68 @@
 import concurrent.futures
-import pprint
+import json
 import time
-from datetime import datetime, timedelta
+from datetime import datetime, timezone
 
-from loguru import logger as log
-
-from api.order import upload_orders_data
 from api.parser import update_parser_status_by_id
-from configs.env import LOG_FILE
+from api.order import upload_orders_data
 from constants.status import ParserStatus
-from etsy_api.get_etsy_api import get_etsy_api
-from etsy_api.orders import get_all_orders_by_shop_id
-from schemes.upload_order import UploadingOrderData, OrderData
-from utils.format_order_data import format_order_data
+from schemes.shop_data import ShopData
+from schemes.upload_order import UploadingOrderData
 from utils.parser_shops_data import get_parser_shops_data
+from amazon_api.get_amazon_api import OrderClient
 
-log.add(
-    LOG_FILE,
-    format="{time} {level} {message}",
-    level="DEBUG",
-    rotation="100 MB",
-    compression="zip",
-    serialize=True,
-)
+from constants.amazon_dates import LAST_MONTH_DATE, EARLIEST_DATE
+from log.logger import logger
 
-# Every 30 minutes
 PARSER_WAIT_TIME_IN_SECONDS = 60 * 30
 
 
-def process_single_shop(shop):
+def process_single_shop(shop: ShopData):
+    order_cl = OrderClient(shop=shop)
+    created_after = LAST_MONTH_DATE
+    shop_error = False
+    offset = 0
+    start_time_shop = datetime.now()
+
+    weekday = datetime.now().weekday()
     now_hour = datetime.now().hour
-    shop_error = False
+    logger.info(f"Parsing shop {shop.shop_id} - {shop.shop_name}...")
+    logger.info(
+        f"Updating parser {shop.parser_id} status to {ParserStatus.PARSING}..."
+    )
 
-    start_time_shop = datetime.now()
-    log.info(f"Parsing shop {shop.shop_id} - {shop.shop_name}...")
-    log.info(f"Updating parser {shop.parser_id} status to {ParserStatus.PARSING}...")
     update_parser_status_by_id(
         parser_id=shop.parser_id,
         status=ParserStatus.PARSING,
     )
-    log.success(f"Parser status updated.")
+    logger.success("Parser status updated.")
 
-    # Initializing constants
-    that_month = True
-    offset = 0
-    date = datetime.now() - timedelta(days=30)
-    weekday = datetime.now().weekday()
-    ##########################
+    for page_orders in order_cl.load_all_orders(CreatedAfter=created_after):
+        # each iteration yields one page of up to 100 orders
 
-    while that_month:
         uploading_orders = UploadingOrderData(shop_id=shop.shop_id, orders_data=[])
 
-        log.info(
+        logger.info(
             f"Fetching orders from {offset} to {offset + 100} from shop {shop.shop_name}..."
         )
-        try:
-            shop_orders, _ = get_all_orders_by_shop_id(
-                etsy_shop_id=int(shop.etsy_shop_id),
-                shop_id=shop.shop_id,
-                limit=100,
-                offset=offset,
-            )
-        except Exception as e:
-            log.critical(f"Some error in getting info from ETSY API: {e}")
-            pprint.pprint(e)
-            update_parser_status_by_id(
-                parser_id=shop.parser_id,
-                status=ParserStatus.ETSY_API_ERROR,
-            )
+
+        orders_data = order_cl.get_orders_with_items(page=page_orders)
+        if orders_data is None:
             shop_error = True
             break
 
-        # Get order details and split for creating and updating
-        for shop_order in shop_orders:
+        uploading_orders.orders_data.extend(orders_data)
 
-            order, goods_in_order, day, month, client, city = format_order_data(
-                order=shop_order,
-            )
-            if day <= date.day and month == date.month:
-                that_month = False
-                break
-
-            uploading_orders.orders_data.append(
-                OrderData(
-                    order=order,
-                    client=client,
-                    city=city,
-                    order_items=goods_in_order,
-                )
-            )
-        number_of_attempts = 0
-        while number_of_attempts < 10:
-            res = upload_orders_data(uploading_orders)
-            if res:
-                break
-            number_of_attempts += 1
-        if number_of_attempts == 10:
-            log.critical(f"Some error on sending info to backend")
+        try:
+            upload_orders_data(uploading_orders)  # upload data to backend
+        except Exception:
+            logger.critical("Error sending info to backend")
             update_parser_status_by_id(
                 parser_id=shop.parser_id,
                 status=ParserStatus.ETSY_API_ERROR,
            )
            shop_error = True
-            break
 
        offset += 100
 
@@ -114,9 +72,9 @@
             break
 
     if shop_error:
-        log.error(f"Shop {shop.shop_id} - {shop.shop_name} parsed with error.")
+        logger.error(f"Shop {shop.shop_id} - {shop.shop_name} parsed with error.")
         return
-    log.info(
+    logger.info(
         f"Updating parser {shop.parser_id} status to {ParserStatus.OK_AND_WAIT}..."
     )
 
@@ -126,32 +84,29 @@
         last_parsed=datetime.now().timestamp(),
     )
 
-    log.success(f"Parser status updated.")
-    log.success(f"Shop {shop.shop_id} - {shop.shop_name} parsed.")
+    logger.success("Parser status updated.")
+    logger.success(f"Shop {shop.shop_id} - {shop.shop_name} parsed.")
 
     end_time_shop = datetime.now()
-    log.info(
+    logger.info(
         f"Shop {shop.shop_id} - {shop.shop_name} parsing time: {end_time_shop - start_time_shop}"
     )
 
 
 def etsy_api_parser():
     shops_data = get_parser_shops_data()
-    # for shop in shops_data:
-    #     etsy_api = get_etsy_api(shop.shop_id)
 
-    # Используем ThreadPoolExecutor для параллельной обработки
+    # use a ThreadPoolExecutor for parallel processing
     with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
-        # Запускаем обработку каждого магазина в отдельном потоке
+        # start processing each shop in a separate thread
         futures = [executor.submit(process_single_shop, shop) for shop in shops_data]
-        # Ждем завершения всех задач
+        # wait for all tasks to complete
         concurrent.futures.wait(futures)
-        # Проверяем, были ли исключения
+        # check whether any thread raised an exception
         for future in futures:
             if future.exception():
-                log.error(f"Error in thread: {future.exception()}")
-    log.success(f"Parsed all shops waiting {PARSER_WAIT_TIME_IN_SECONDS} to repeat")
-
+                logger.error(f"Error in thread: {future.exception()}")
+    logger.success(f"Parsed all shops; waiting {PARSER_WAIT_TIME_IN_SECONDS} seconds to repeat")
 
-# TODO: сделать чтобы файлы с кредами не писались в файл в многопотоке
 
 if __name__ == "__main__":
     while True:
@@ -159,5 +114,5 @@
             etsy_api_parser()
             time.sleep(PARSER_WAIT_TIME_IN_SECONDS)
         except Exception as e:
-            log.error(f"Error on fetching orders {e}")
+            logger.error(f"Error fetching orders: {e}")
             time.sleep(900)
diff --git a/src/parser_all.py b/src/parser_all.py
index 274c5b5..02106c2 100644
--- a/src/parser_all.py
+++ b/src/parser_all.py
@@ -1,126 +1,69 @@
 import concurrent.futures
-import pprint
-import time
 from datetime import datetime
 
-from loguru import logger as log
-
-from api.order import upload_orders_data
 from api.parser import update_parser_status_by_id
-from configs.env import LOG_FILE
+from api.order import upload_orders_data
 from constants.status import ParserStatus
-from etsy_api.orders import get_all_orders_by_shop_id
-from schemes.upload_order import UploadingOrderData, OrderData
-from utils.format_order_data import format_order_data
+from schemes.shop_data import ShopData
+from schemes.upload_order import UploadingOrderData
 from utils.parser_shops_data import get_parser_shops_data
+from amazon_api.get_amazon_api import OrderClient
 
-log.add(
-    LOG_FILE,
-    format="{time} {level} {message}",
-    level="DEBUG",
-    rotation="100 MB",
-    compression="zip",
-    serialize=True,
-)
-
-# Every 30 minutes
-PARSER_WAIT_TIME_IN_SECONDS = 60 * 30
+from constants.amazon_dates import LAST_MONTH_DATE, EARLIEST_DATE, LAST_WEEK_DATE
+from log.logger import logger
 
 
-def process_single_shop(shop):
-    shops_data = get_parser_shops_data()
-    now_hour = datetime.now().hour
-
+def process_single_shop(shop: ShopData):
+    order_cl = OrderClient(shop=shop)
+    created_after = EARLIEST_DATE
     shop_error = False
-
+    offset = 0
     start_time_shop = datetime.now()
-    log.info(f"Parsing shop {shop.shop_id} - {shop.shop_name}...")
-    log.info(
+
+    logger.info(f"Parsing shop {shop.shop_id} - {shop.shop_name}...")
+    logger.info(
         f"Updating parser {shop.parser_id} status to {ParserStatus.PARSING}..."
     )
+
     update_parser_status_by_id(
         parser_id=shop.parser_id,
         status=ParserStatus.PARSING,
     )
-    log.success(f"Parser status updated.")
+    logger.success("Parser status updated.")
 
-    # Initializing constants
-    offset = 0
-    ##########################
-    try:
-        shop_orders, orders_count = get_all_orders_by_shop_id(
-            etsy_shop_id=int(shop.etsy_shop_id),
-            shop_id=shop.shop_id,
-            limit=100,
-            offset=offset,
-        )
-    except Exception as e:
-        log.critical(f"Some error in getting info from ETSY API: {e}")
-        pprint.pprint(e)
-        update_parser_status_by_id(
-            parser_id=shop.parser_id,
-            status=ParserStatus.ETSY_API_ERROR,
-        )
-        return
-    while offset <= orders_count:
-        uploading_orders = UploadingOrderData(shop_id=shop.shop_id, is_full_data_updating=True, orders_data=[])
+    for page_orders in order_cl.load_all_orders(CreatedAfter=created_after):
+        # each iteration yields one page of up to 100 orders
 
-        log.info(
+        uploading_orders = UploadingOrderData(shop_id=shop.shop_id, orders_data=[])
+
+        logger.info(
             f"Fetching orders from {offset} to {offset + 100} from shop {shop.shop_name}..."
         )
-        if offset:
-            try:
-                shop_orders, _ = get_all_orders_by_shop_id(
-                    etsy_shop_id=int(shop.etsy_shop_id),
-                    shop_id=shop.shop_id,
-                    limit=100,
-                    offset=offset,
-                )
-            except Exception as e:
-                log.critical(f"Some error in getting info from ETSY API: {e}")
-                pprint.pprint(e)
-                update_parser_status_by_id(
-                    parser_id=shop.parser_id,
-                    status=ParserStatus.ETSY_API_ERROR,
-                )
-                shop_error = True
-                break
-
-        # Get order details and split for creating and updating
-        # Get order details and split for creating and updating
-        for shop_order in shop_orders:
-            order, goods_in_order, day, month, client, city = format_order_data(
-                order=shop_order,
-            )
-            uploading_orders.orders_data.append(
-                OrderData(
-                    order=order,
-                    client=client,
-                    city=city,
-                    order_items=goods_in_order,
-                )
-            )
-        number_of_attempts = 0
-        while number_of_attempts < 10:
-            res = upload_orders_data(uploading_orders)
-            if res:
-                break
-            number_of_attempts += 1
-        if number_of_attempts == 10:
-            log.critical(f"Some error on sending info to backend")
+
+        orders_data = order_cl.get_orders_with_items(page=page_orders)
+        if orders_data is None:
+            shop_error = True
+            break
+
+        uploading_orders.orders_data.extend(orders_data)
+
+        try:
+            upload_orders_data(uploading_orders)  # send data to backend
+        except Exception:
+            logger.critical("Error sending info to backend")
             update_parser_status_by_id(
                 parser_id=shop.parser_id,
                 status=ParserStatus.ETSY_API_ERROR,
             )
             shop_error = True
-        offset += 100
 
-    if shop_error:
-        log.error(f"Shop {shop.shop_id} - {shop.shop_name} parsed with error.")
-        return
+        offset += 100
 
-    log.info(
+    if shop_error:
+        logger.error(f"Shop {shop.shop_id} - {shop.shop_name} parsed with error.")
+        return
+    logger.info(
         f"Updating parser {shop.parser_id} status to {ParserStatus.OK_AND_WAIT}..."
     )
 
@@ -130,38 +73,31 @@
         last_parsed=datetime.now().timestamp(),
     )
 
-    log.success(f"Parser status updated.")
-    log.success(f"Shop {shop.shop_id} - {shop.shop_name} parsed.")
+    logger.success("Parser status updated.")
+    logger.success(f"Shop {shop.shop_id} - {shop.shop_name} parsed.")
 
     end_time_shop = datetime.now()
-    log.info(f"Shop parsing time: {end_time_shop - start_time_shop}")
-
-    log.success(
-        f"Parsing finished, wait {PARSER_WAIT_TIME_IN_SECONDS} seconds to repeat."
+    logger.info(
+        f"Shop {shop.shop_id} - {shop.shop_name} parsing time: {end_time_shop - start_time_shop}"
     )
 
 
 def etsy_api_parser():
     shops_data = get_parser_shops_data()
 
-    # Используем ThreadPoolExecutor для параллельной обработки
+    # use a ThreadPoolExecutor for parallel processing
     with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
-        # Запускаем обработку каждого магазина в отдельном потоке
+        # start processing each shop in a separate thread
         futures = [executor.submit(process_single_shop, shop) for shop in shops_data]
-        # Ждем завершения всех задач
+        # wait for all tasks to complete
         concurrent.futures.wait(futures)
-        # Проверяем, были ли исключения
+        # check whether any thread raised an exception
         for future in futures:
             if future.exception():
-                log.error(f"Error in thread: {future.exception()}")
-    log.success(f"Parsed all shops waiting {PARSER_WAIT_TIME_IN_SECONDS} to repeat")
+                logger.error(f"Error in thread: {future.exception()}")
 
 
 if __name__ == "__main__":
-
     try:
         etsy_api_parser()
-        time.sleep(PARSER_WAIT_TIME_IN_SECONDS)
-        time.sleep(900)
     except Exception as e:
-        log.error(f"Error on fetching orders {e}")
-        time.sleep(900)
+        logger.error(f"Error fetching orders: {e}")
diff --git a/src/parser_debug.py b/src/parser_debug.py
index 4cb5088..18ff1fd 100644
--- a/src/parser_debug.py
+++ b/src/parser_debug.py
@@ -1,114 +1,64 @@
-import concurrent.futures
 import json
-import pprint
-import time
-from datetime import datetime, timedelta
+from datetime import datetime
 
-from loguru import logger as log
-
-from api.order import upload_orders_data
 from api.parser import update_parser_status_by_id
-from configs.env import LOG_FILE
 from constants.status import ParserStatus
-from etsy_api.orders import get_all_orders_by_shop_id
-from schemes.upload_order import UploadingOrderData, OrderData
-from utils.format_order_data import format_order_data
+from schemes.shop_data import ShopData
+from schemes.upload_order import UploadingOrderData
 from utils.parser_shops_data import get_parser_shops_data
+from amazon_api.get_amazon_api import OrderClient
 
-log.add(
-    LOG_FILE,
-    format="{time} {level} {message}",
-    level="DEBUG",
-    rotation="100 MB",
-    compression="zip",
-    serialize=True,
-)
-
-# Every 30 minutes
-PARSER_WAIT_TIME_IN_SECONDS = 60 * 30
+from constants.amazon_dates import LAST_MONTH_DATE, EARLIEST_DATE
+from log.logger import logger
 
 
-def process_single_shop(shop):
-    now_hour = datetime.now().hour
+def process_single_shop(shop: ShopData):
+    order_cl = OrderClient(shop=shop)
+    created_after = LAST_MONTH_DATE
     shop_error = False
-
+    offset = 0
     start_time_shop = datetime.now()
-    log.info(f"Parsing shop {shop.shop_id} - {shop.shop_name}...")
-    log.info(f"Updating parser {shop.parser_id} status to {ParserStatus.PARSING}...")
+
+
+    logger.info(f"Parsing shop {shop.shop_id} - {shop.shop_name}...")
+    logger.info(
+        f"Updating parser {shop.parser_id} status to {ParserStatus.PARSING}..."
+    )
+
     update_parser_status_by_id(
         parser_id=shop.parser_id,
         status=ParserStatus.PARSING,
     )
-    log.success(f"Parser status updated.")
+    logger.success("Parser status updated.")
 
-    # Initializing constants
-    that_month = True
-    offset = 0
-    date = datetime.now() - timedelta(days=30)
-    weekday = datetime.now().weekday()
-    ##########################
+    for page_orders in order_cl.load_all_orders(CreatedAfter=created_after):
+        # each iteration yields one page of up to 100 orders
 
-    while that_month:
         uploading_orders = UploadingOrderData(shop_id=shop.shop_id, orders_data=[])
 
-        log.info(
+        logger.info(
             f"Fetching orders from {offset} to {offset + 100} from shop {shop.shop_name}..."
         )
-        try:
-            shop_orders, _ = get_all_orders_by_shop_id(
-                etsy_shop_id=int(shop.etsy_shop_id),
-                shop_id=shop.shop_id,
-                limit=100,
-                offset=offset,
-            )
-        except Exception as e:
-            log.critical(f"Some error in getting info from ETSY API: {e}")
-            pprint.pprint(e)
-            update_parser_status_by_id(
-                parser_id=shop.parser_id,
-                status=ParserStatus.ETSY_API_ERROR,
-            )
+
+        orders_data = order_cl.get_orders_with_items(page=page_orders)
+        if orders_data is None:
             shop_error = True
             break
 
-        # Get order details and split for creating and updating
-        for shop_order in shop_orders:
-
-            order, goods_in_order, day, month, client, city = format_order_data(
-                order=shop_order,
-            )
-            if day <= date.day and month == date.month:
-                that_month = False
-                break
-
-            uploading_orders.orders_data.append(
-                OrderData(
-                    order=order,
-                    client=client,
-                    city=city,
-                    order_items=goods_in_order,
-                )
-            )
-
-        # res = upload_orders_data(uploading_orders)
-        with open("test_data.json", "w") as f:
-            json.dump(uploading_orders.model_dump(), f)
+        uploading_orders.orders_data.extend(orders_data)
 
-        return
+        with open("test_data_2.json", "w") as f:
+            json.dump(uploading_orders.model_dump(), f)
 
         offset += 100
-        if offset > 200:
-            if now_hour == 20 and weekday in (5, 6):
-                continue
-            break
 
     if shop_error:
-        log.error(f"Shop {shop.shop_id} - {shop.shop_name} parsed with error.")
+        logger.error(f"Shop {shop.shop_id} - {shop.shop_name} parsed with error.")
         return
-    log.info(
+    logger.info(
         f"Updating parser {shop.parser_id} status to {ParserStatus.OK_AND_WAIT}..."
     )
 
@@ -118,30 +68,14 @@
         last_parsed=datetime.now().timestamp(),
     )
 
-    log.success(f"Parser status updated.")
-    log.success(f"Shop {shop.shop_id} - {shop.shop_name} parsed.")
+    logger.success("Parser status updated.")
+    logger.success(f"Shop {shop.shop_id} - {shop.shop_name} parsed.")
 
     end_time_shop = datetime.now()
-    log.info(
+    logger.info(
         f"Shop {shop.shop_id} - {shop.shop_name} parsing time: {end_time_shop - start_time_shop}"
     )
 
 
-def etsy_api_parser():
-    shops_data = get_parser_shops_data()
-
-    # Используем ThreadPoolExecutor для параллельной обработки
-    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
-        # Запускаем обработку каждого магазина в отдельном потоке
-        futures = [executor.submit(process_single_shop, shop) for shop in shops_data]
-        # Ждем завершения всех задач
-        concurrent.futures.wait(futures)
-        # Проверяем, были ли исключения
-        for future in futures:
-            if future.exception():
-                log.error(f"Error in thread: {future.exception()}")
-    log.success(f"Parsed all shops waiting {PARSER_WAIT_TIME_IN_SECONDS} to repeat")
-
-
 if __name__ == "__main__":
     shops_data = get_parser_shops_data()
-    process_single_shop(shops_data[1])
+    process_single_shop(shops_data[0])
diff --git a/src/schemes/access_token.py b/src/schemes/access_token.py
deleted file mode 100644
index 0a38438..0000000
--- a/src/schemes/access_token.py
+++ /dev/null
@@ -1,7 +0,0 @@
-from pydantic import BaseModel
-
-
-class AuthToken(BaseModel):
-    access_token: str
-    refresh_token: str
-    expires_at: float
diff --git a/src/schemes/auth.py b/src/schemes/auth.py
index 181dbdd..c002227 100644
--- a/src/schemes/auth.py
+++ b/src/schemes/auth.py
@@ -4,6 +4,3 @@
 
 class Auth(BaseModel):
     Authorization: str
-
-
-class AuthCode(BaseModel):
-    code: str
diff --git a/src/schemes/client.py b/src/schemes/client.py
index d67e068..327bfcc 100644
--- a/src/schemes/client.py
+++ b/src/schemes/client.py
@@ -5,3 +5,4 @@ class Client(BaseModel):
     user_marketplace_id: str | None = None
     name: str | None = None
     email: str | None = None
+
diff --git a/src/schemes/parser_info.py b/src/schemes/parser_info.py
deleted file mode 100644
index 1ec813b..0000000
--- a/src/schemes/parser_info.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from pydantic import BaseModel
-
-
-class Parser(BaseModel):
-    id: int
-    shop_id: int
-    status: int
-    command: int
-    last_parsed: str
diff --git a/src/schemes/shop_data.py b/src/schemes/shop_data.py
index 168b457..2cd73f0 100644
--- a/src/schemes/shop_data.py
+++ b/src/schemes/shop_data.py
@@ -5,10 +5,4 @@ class ShopData(BaseModel):
     parser_id: int
     shop_id: int
     shop_name: str
-    # TODO delete param shop cookie
-    shop_cookie: str
-    shop_token: str
-    shop_refresh_token: str
-    expiry: float
-    etsy_shop_id: str
-    shop_auth_code: str
+    amazon_shop_id: str
diff --git a/src/utils/format_datetime.py b/src/utils/format_datetime.py
new file mode 100644
index 0000000..f146a69
--- /dev/null
+++ b/src/utils/format_datetime.py
@@ -0,0 +1,18 @@
+from datetime import datetime
+
+def is_iso_utc_z_format(date_str):
+    """True if date_str is an ISO 8601 UTC timestamp ending in 'Z'."""
+    if not date_str.endswith('Z'):
+        return False
+    try:
+        dt = datetime.fromisoformat(date_str[:-1] + '+00:00')
+        return dt.utcoffset().total_seconds() == 0
+    except ValueError:
+        return False
+
+
+def iso_to_simple(iso_str: str):
+    """ISO 8601 ('...Z') to dd.mm.YYYY; returns None for any other format."""
+    if is_iso_utc_z_format(iso_str):
+        dt = datetime.strptime(iso_str, "%Y-%m-%dT%H:%M:%SZ")
+        return f"{dt.day:02d}.{dt.month:02d}.{dt.year}"
+    return None
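iso_to_simple deliberately returns None for anything that is not a UTC timestamp ending in Z, which lets callers detect unexpected PurchaseDate formats instead of silently mis-parsing them:

```python
from utils.format_datetime import iso_to_simple

print(iso_to_simple("2024-02-17T08:30:00Z"))       # 17.02.2024
print(iso_to_simple("2024-02-17T08:30:00+02:00"))  # None — offset form, not the 'Z' form
```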
diff --git a/src/utils/format_order_data.py b/src/utils/format_order_data.py
index 6d5bee4..08addb6 100644
--- a/src/utils/format_order_data.py
+++ b/src/utils/format_order_data.py
@@ -1,116 +1,94 @@
-import json
-from datetime import datetime
-
-from schemes.city import City
-from schemes.client import Client
 from schemes.order import Order
+from schemes.client import Client
+from schemes.city import City
 from schemes.order_item import GoodInOrder
+from schemes.upload_order import OrderData
+from utils.format_datetime import iso_to_simple
+from constants.status import OrderStatus
 
 
-def format_order_data(
-    order: dict,
-):
-    order_id = order["receipt_id"]
-    # Good in orders
-    order_items = []
-    order_created_at = datetime.fromtimestamp(order["created_timestamp"])
-    ###########
-    day = order_created_at.day
-    month = order_created_at.month
-    year = order_created_at.year
-    ###########
-    order_status = order["status"]
-    # Order date
-    formated_date = f"{day}.{month}.{year}"
-    # Full quantity of items in order
-    full_items_quantity = 0
-
-    # Getting order shipping info
-    _shipping_info = order["shipments"]
-    receipt_shipping_id = ""
-    tracking_code = ""
-    if len(_shipping_info):
-        receipt_shipping_id = str(_shipping_info[0]["receipt_shipping_id"])
-        tracking_code = str(_shipping_info[0]["tracking_code"])
-
-    # Getting order city and state ordered from
-    city = City()
-    try:
-        city = City(
-            name=order["city"],
-            state=order["state"],
-            country=order["country_iso"],
-        )
-    except Exception:
-        pass
-    # Getting client info
-    client = Client()
-    try:
-        client = Client(
-            user_marketplace_id=str(order["buyer_user_id"]),
-            name=order["name"],
-            email=order["buyer_email"]
-        )
-    except Exception:
-        pass
-    # Creating goods and good in order objects
-    for trans in order["transactions"]:
-        # Quantity of item
-        quantity = trans["quantity"]
-        # Full quantity of order items
-        full_items_quantity += quantity
-        # Name of good
-        uniquename: str = trans["sku"]
-        # Transaction ID
-        listing_id: int = trans.get("listing_id")
-        # Product ID
-        product_id: int = trans.get("product_id")
-        # Transaction Type
-        transaction_type: str = trans.get("transaction_type")
-
-        # Getting all additional engraving info
-        engraving_info: dict = {}
-        for variation in trans["variations"]:
-            variation_name = variation.get("formatted_name")
-            variation_value = variation.get("formatted_value")
-
-            engraving_info_item: dict = {
-                f"{variation_name}": f"{variation_value}",
-                "listing_id": listing_id,
-                "product_id": product_id,
-                "transaction_type": transaction_type,
-            }
-            engraving_info.update(engraving_info_item)
-
-        # Convert obj to str
-        engraving_info_str = json.dumps(engraving_info)
-
-        # Amount of item
-        price = (trans["price"]["amount"] / trans["price"]["divisor"]) * quantity
-        amount = price - trans["buyer_coupon"]
-        #################
-        order_items.append(
-            GoodInOrder(
-                uniquename=uniquename,
-                quantity=quantity,
-                amount=amount,
-                engraving_info=engraving_info_str,
-            )
-        )
-
-    order_total = order["grandtotal"]
-    buyer_paid = order_total["amount"] / order_total["divisor"]
-    tax_total = order["total_tax_cost"]
-    tax_amount = tax_total["amount"] / tax_total["divisor"]
-    order = Order(
-        status=order_status,
-        order_id=str(order_id),
-        date=formated_date,
-        quantity=full_items_quantity,
-        buyer_paid=buyer_paid,
-        tax=tax_amount,
-        receipt_shipping_id=receipt_shipping_id,
-        tracking_code=tracking_code,
-    )
-
-    return order, order_items, day, month, client, city
+def _format_order(*,
+                  order: dict,
+                  order_obj: Order):
+    """Fill order_obj with top-level order fields; quantity and tax are accumulated later from the items."""
+    order_obj.buyer_paid = float(order.get("OrderTotal", {}).get("Amount", 0))
+    order_obj.order_id = order.get("AmazonOrderId", "")
+    order_obj.status = OrderStatus.get_backend_status(order.get("OrderStatus"))
+    order_obj.date = iso_to_simple(order.get("PurchaseDate"))
+    order_obj.quantity = 0
+    order_obj.tax = 0
+
+
+def _format_good_in_order(*,
+                          item: dict,
+                          item_obj: GoodInOrder):
+    """Fill a GoodInOrder object from a single order item."""
+
+    item_price = item.get("ItemPrice")
+    item_quantity = item.get("QuantityOrdered")
+    item_discount = item.get("PromotionDiscount")
+
+    item_obj.uniquename = item.get("SellerSKU")
+    item_obj.quantity = item_quantity
+
+    _amount = 0.0
+    if item_price and item_quantity:
+        # ItemPrice.Amount already covers the whole ordered quantity, and a
+        # missing PromotionDiscount means "no discount", not "no amount"
+        _discount = float(item_discount["Amount"]) if item_discount else 0.0
+        _amount = float(item_price["Amount"]) - _discount
+
+    item_obj.amount = _amount
+    item_obj.engraving_info = item["Title"]  # TODO: replace with real engraving info
+
+
+def _format_client(*,
+                   order: dict,
+                   client_obj: Client):
+    client_obj.email = order.get("BuyerInfo", {}).get("BuyerEmail")
+
+
+def _format_city(*,
+                 order: dict,
+                 city_obj: City):
+    shipping_address = order.get("ShippingAddress", {})
+    city_obj.name = shipping_address.get("City")
+    city_obj.state = shipping_address.get("StateOrRegion")
+    city_obj.country = shipping_address.get("CountryCode")
+
+
+def format_order_data(*,
+                      order: dict,
+                      items: list[dict]) -> OrderData:
+    order_obj = Order()
+    client_obj = Client()
+    city_obj = City()
+    order_items = []
+
+    _format_order(order=order, order_obj=order_obj)
+    _format_client(order=order, client_obj=client_obj)
+    _format_city(order=order, city_obj=city_obj)
+
+    for item in items:
+        good_in_order = GoodInOrder()
+        _format_good_in_order(item=item, item_obj=good_in_order)
+
+        # accumulate order-level quantity and tax
+        item_quantity = good_in_order.quantity
+        item_tax = item.get("ItemTax")
+        item_discount_tax = item.get("PromotionDiscountTax")
+
+        order_obj.quantity += item_quantity
+
+        _amount_tax = 0.0
+        if item_tax and item_quantity:
+            # like ItemPrice, ItemTax covers the whole line; a missing
+            # PromotionDiscountTax means no tax discount
+            _discount_tax = float(item_discount_tax["Amount"]) if item_discount_tax else 0.0
+            _amount_tax = float(item_tax["Amount"]) - _discount_tax
+        order_obj.tax += _amount_tax  # accumulate across items instead of overwriting
+
+        order_items.append(good_in_order)
+
+    return OrderData(
+        order=order_obj,
+        client=client_obj,
+        city=city_obj,
+        order_items=order_items
+    )
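A worked example of the amount arithmetic above, using the SP-API convention that ItemPrice and PromotionDiscount are money objects whose Amount strings already cover the whole line, not a single unit (the values are invented):

```python
item = {  # shape of a getOrderItems entry; values are made up
    "SellerSKU": "MUG-01",
    "QuantityOrdered": 2,
    "ItemPrice": {"CurrencyCode": "USD", "Amount": "30.00"},
    "PromotionDiscount": {"CurrencyCode": "USD", "Amount": "5.00"},
}

amount = float(item["ItemPrice"]["Amount"]) - float(item["PromotionDiscount"]["Amount"])
print(amount)  # 25.0 — the two-unit line total after the promotion
```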
trans["price"]["divisor"]) * quantity - amount = price - trans["buyer_coupon"] - ################# - order_items.append( - GoodInOrder( - uniquename=uniquename, - quantity=quantity, - amount=amount, - engraving_info=engraving_info_str, + + item_obj.amount = _amount + item_obj.engraving_info = item["Title"] # TODO СДЕЛАТЬ!!!! + + +def _format_client(*, + order: dict, + client_obj: Client): + client_obj.email = order.get("BuyerInfo", {}).get("BuyerEmail") + +def _format_city(*, + order: dict, + city_obj: City): + shipping_address = order.get("ShippingAddress", {}) + city_obj.name = shipping_address.get("City") + city_obj.state = shipping_address.get("StateOrRegion") + city_obj.country = shipping_address.get("CountryCode") + + +def format_order_data(*, + order: dict, + items: list[dict]) -> OrderData: + order_obj = Order() + client_obj = Client() + city_obj = City() + order_items = [] + + _format_order(order=order, order_obj=order_obj) + _format_client(order=order, client_obj=client_obj) + _format_city(order=order, city_obj=city_obj) + + for item in items: + good_in_order = GoodInOrder() + _format_good_in_order(item=item, item_obj=good_in_order) + + # calculate tax and quantity for order + item_quantity = good_in_order.quantity + item_tax = item.get("ItemTax") + item_discount_tax = item.get("PromotionDiscountTax") + + order_obj.quantity += item_quantity + + _amount_tax = 0.0 + if item_tax and item_discount_tax and item_quantity: + _amount_tax += ( + (item_quantity * float(item_tax["Amount"])) - float(item_discount_tax["Amount"]) ) - ) + order_obj.tax = _amount_tax - order_total = order["grandtotal"] - buyer_paid = order_total["amount"] / order_total["divisor"] - tax_total = order["total_tax_cost"] - tax_amount = tax_total["amount"] / tax_total["divisor"] - order = Order( - status=order_status, - order_id=str(order_id), - date=formated_date, - quantity=full_items_quantity, - buyer_paid=buyer_paid, - tax=tax_amount, - receipt_shipping_id=receipt_shipping_id, - tracking_code=tracking_code, - ) + order_items.append(good_in_order) - return order, order_items, day, month, client, city + return OrderData( + order=order_obj, + client=client_obj, + city=city_obj, + order_items=order_items + ) diff --git a/src/utils/parser_shops_data.py b/src/utils/parser_shops_data.py index 42b798d..5add1e5 100644 --- a/src/utils/parser_shops_data.py +++ b/src/utils/parser_shops_data.py @@ -1,11 +1,10 @@ -import json - -from constants.files_paths import SHOPS_DATA_FILE_PATH +from configs import settings from schemes.shop_data import ShopData +import json def get_parser_shops_data() -> list[ShopData]: - with open(SHOPS_DATA_FILE_PATH) as f: + with open(settings.SHOPS_DATA_FILE_PATH) as f: shops_data = [ShopData(**shop_data) for shop_data in json.load(f)] return shops_data diff --git a/src/utils/retry.py b/src/utils/retry.py new file mode 100644 index 0000000..6691e31 --- /dev/null +++ b/src/utils/retry.py @@ -0,0 +1,22 @@ +from log.logger import logger + +def retry(retry=10, exception_classes=None): + if exception_classes is None: + exception_classes = (Exception,) + + def decorator(function): + def wrapper(*args, **kwargs): + attempts = 0 + while attempts < retry: + try: + return function(*args, **kwargs) + except exception_classes as e: + attempts += 1 + if attempts >= retry: + raise e + logger.critical(f"Retrying {function.__name__} (Attempt {attempts}/{retry}) due to error: {e}") + + wrapper.__doc__ = function.__doc__ + return wrapper + + return decorator diff --git a/src/utils/safe_ratelimit_amazon.py 
b/src/utils/safe_ratelimit_amazon.py
new file mode 100644
index 0000000..50b64ca
--- /dev/null
+++ b/src/utils/safe_ratelimit_amazon.py
@@ -0,0 +1,42 @@
+import time
+
+from sp_api.base import ApiResponse
+
+
+def _delay_execution(*,
+                     throttle_by_seconds: int,
+                     header_limit: bool,
+                     rate_limit: float) -> float:
+    """Delay in seconds."""
+    if header_limit and rate_limit:
+        # the rate-limit header reports requests per second,
+        # so the matching delay between calls is its reciprocal
+        return 1 / float(rate_limit)
+    return float(throttle_by_seconds)
+
+
+def safe_rate_limit(throttle_by_seconds: int = 1,
+                    header_limit: bool = False):
+    def decorator(function):
+        def wrapper(*args, **kwargs):
+            resp: ApiResponse = function(*args, **kwargs)
+            if not isinstance(resp, ApiResponse):
+                return resp
+            if resp.next_token:
+                # more pages are pending; skip the delay between pages
+                return resp
+
+            sleep_time = _delay_execution(throttle_by_seconds=throttle_by_seconds,
+                                          header_limit=header_limit,
+                                          rate_limit=resp.rate_limit)
+
+            if sleep_time:
+                time.sleep(sleep_time)
+            return resp
+
+        wrapper.__doc__ = function.__doc__
+        return wrapper
+
+    return decorator
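The 1 / rate_limit inversion works because the SP-API's x-amzn-RateLimit-Limit header reports the allowed request rate in requests per second, so sleeping for its reciprocal keeps the client at or under that rate:

```python
def delay_for(rate_limit_header: str) -> float:
    # e.g. a 0.5 req/s limit means one call every 2 seconds
    return 1 / float(rate_limit_header)

print(delay_for("0.5"))  # 2.0
print(delay_for("2"))    # 0.5
```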