# sp_api_client.py

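"""
Amazon SP-API data-sync client.

SpApiRequest wraps the python-amazon-sp-api (sp_api) package to request and
download Selling Partner reports (listings, FBA inventory, orders, sales &
traffic), normalise them with pandas, and write them into the MySQL tables
used by this project (asj_ads.seller_listings, asj_ads.orderReport_,
asj_ads.SalesAndTrafficByAsin, asj_ads.Goods).
"""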
import clickhouse_connect
import time
import warnings
warnings.filterwarnings('ignore')
import numpy as np
from pymysql import Timestamp
from sp_api.util import throttle_retry, load_all_pages
from sp_api.api import Orders, ListingsItems, Inventories, Reports, CatalogItems, Products
from sp_api.base import Marketplaces, ReportType, ProcessingStatus
import pandas as pd
import gzip
from io import BytesIO, StringIO
from datetime import datetime, timedelta, timezone
import pytz
from dateutil.parser import parse
import pymysql
from typing import List, Literal
from random import shuffle
from retry import retry
import requests
from urllib import request
import json
from clickhouse_connect import get_client

try:
    from ..settings import MYSQL_AUTH_CONF, MYSQL_DATA_CONF
except ImportError:
    from sync_amz_data.sync_amz_data.settings import MYSQL_AUTH_CONF, MYSQL_DATA_CONF

class SpApiRequest:

    def __init__(self, credentials, marketplace):
        self.credentials = credentials
        self.marketplace = marketplace
    @classmethod
    @retry(tries=3, delay=5, backoff=2)
    def Token_auth(cls):  # AUTH - mysql_connect_auth
        conn = pymysql.connect(**MYSQL_AUTH_CONF)
        return conn

    @classmethod
    @retry(tries=3, delay=5, backoff=2)
    def Data_auth(cls):  # DATA - mysql_connect_auth_lst
        conn = pymysql.connect(**MYSQL_DATA_CONF)
        return conn

    @classmethod
    @retry(tries=3, delay=5, backoff=2)
    def LocalHost_Auth(cls):  # local database
        conn = pymysql.connect(user="huangyifan",
                               password="123456",
                               host="127.0.0.1",
                               database="amz_sp_api",
                               port=3306)
        return conn
    @staticmethod
    @retry(tries=3, delay=5, backoff=2)
    def auth_info():  # get the auth data of every shop - this connection provides the refresh_token used for account authorization
        auth_conn = SpApiRequest.Token_auth()
        cursor = auth_conn.cursor()
        cursor.execute("select * from amazon_sp_report.amazon_sp_auth_info;")
        columns_name = [i[0] for i in cursor.description]
        rel = cursor.fetchall()
        df = pd.DataFrame(rel, columns=columns_name)
        return df

    @classmethod
    @retry(tries=3, delay=5, backoff=2)
    def get_refreshtoken(cls):  # get each shop's refresh token obtained after authorization
        df = cls.auth_info()
        refreshtoken_list = df['refresh_token'].to_numpy().tolist()
        return refreshtoken_list
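
    # Illustrative flow (not executed here): auth_info() returns the
    # amazon_sp_auth_info table as a DataFrame with one row per authorized shop,
    # and each row's refresh_token can be combined with the LWA/AWS keys (see
    # get_catelog below) to build the `credentials` dict that is passed to
    # SpApiRequest(credentials, marketplace).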
    @classmethod
    def get_catelog(cls, account_name, country=Marketplaces.US, asin=None):  # active - usable but not currently used
        if country in [Marketplaces.US, Marketplaces.BR, Marketplaces.CA, Marketplaces.MX]:
            region = 'NA'
        elif country in [Marketplaces.DE, Marketplaces.AE, Marketplaces.BE, Marketplaces.PL,
                         Marketplaces.EG, Marketplaces.ES, Marketplaces.GB, Marketplaces.IN, Marketplaces.IT,
                         Marketplaces.NL, Marketplaces.SA, Marketplaces.SE, Marketplaces.TR, Marketplaces.UK, Marketplaces.FR,
                         ]:
            region = 'EU'
        else:
            region = str(country)[-2:]
        df = cls.auth_info()
        try:
            refresh_token = df.query("account_name==@account_name and region==@region")['refresh_token'].values[0]
        except Exception:
            print("请输入正确的account name与Marketplace")  # please provide a valid account name and Marketplace
            return '获取失败'
        cred = {
            'refresh_token': refresh_token,
            'lwa_app_id': 'amzn1.application-oa2-client.1f9d3d4747e14b22b4b598e54e6b922e',  # LWA credentials from the developer profile in Seller Central
            'lwa_client_secret': 'amzn1.oa2-cs.v1.3af0f5649f5b8e151cd5bd25c10f2bf3113172485cd6ffc52ccc6a5e8512b490',
            'aws_access_key': 'AKIARBAGHTGOZC7544GN',
            'aws_secret_key': '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',
            'role_arn': 'arn:aws:iam::070880041373:role/Amazon_SP_API_ROLE'
        }
        # cate_item = CatalogItems(credentials=cred, marketplace=country)
        # images_info = cate_item.get_catalog_item(asin=asin, **{"includedData": ['images']})
        # print(images_info)
        # images = images_info.images[0].get('images')[0]['link']
        # title_info = cate_item.get_catalog_item(asin=asin)
        # print(title_info)
        # title = title_info.payload['summaries'][0]['itemName']
        cate_item = CatalogItems(credentials=cred, marketplace=country, version='2022-04-01')
        test_bundle = cate_item.get_catalog_item(asin=asin, **{"includedData": ['salesRanks']})
        print(test_bundle)
        # return {'images': images, 'title': title}
    @retry(tries=3, delay=5, backoff=2)
    def create_report(self, **kwargs):  # main CreateReport helper - submit a report request
        reportType = kwargs['reportType']
        reportOptions = kwargs.get("reportOptions")
        dataStartTime = datetime.now().strftime("%Y-%m-%dT%H:%M:%S") if kwargs.get("dataStartTime") is None else kwargs.get("dataStartTime") + "T00:00:00"
        dataEndTime = datetime.now().strftime("%Y-%m-%dT%H:%M:%S") if kwargs.get("dataEndTime") is None else kwargs.get("dataEndTime") + "T23:59:59"
        report = Reports(credentials=self.credentials, marketplace=self.marketplace)
        rel = report.create_report(
            reportType=reportType,
            # note: the misspelled 'marketpalceids' kwarg name is kept as-is for existing callers
            marketplaceIds=[kwargs['marketpalceids'] if kwargs.get('marketpalceids') is not None else self.marketplace.marketplace_id],
            reportOptions=reportOptions, dataStartTime=dataStartTime, dataEndTime=dataEndTime
        )
        reportId = rel.payload.get("reportId")
        # print(reportId)
        return reportId
    # @retry(tries=2, delay=3, backoff=2)
    def decompression(self, reportId):  # after create_report - poll the report id, then download and decompress the document
        report = Reports(credentials=self.credentials, marketplace=self.marketplace)
        while True:
            reportId_info = report.get_report(reportId=reportId)
            # print(reportId_info.payload)
            print("please wait...")
            if reportId_info.payload.get("processingStatus") == ProcessingStatus.DONE:
                reportDocumentId = reportId_info.payload.get("reportDocumentId")
                rp_table = report.get_report_document(reportDocumentId=reportDocumentId, download=False)
                print(rp_table)
                if rp_table.payload.get('compressionAlgorithm') is not None and self.marketplace.marketplace_id not in ['A1VC38T7YXB528']:
                    try:
                        df = pd.read_table(filepath_or_buffer=rp_table.payload['url'], compression={"method": 'gzip'}, encoding='iso-8859-1')
                    except Exception:
                        df = pd.read_csv(filepath_or_buffer=rp_table.payload['url'], compression={"method": 'gzip'},
                                         encoding='iso-8859-1')
                    return df
                elif rp_table.payload.get('compressionAlgorithm') is not None and self.marketplace.marketplace_id in ['A1VC38T7YXB528']:
                    df = pd.read_table(filepath_or_buffer=rp_table.payload['url'], compression={"method": 'gzip'},
                                       encoding='Shift-JIS')
                    # df.columns =
                    return df
                elif rp_table.payload.get('compressionAlgorithm') is None and self.marketplace.marketplace_id not in ['A1VC38T7YXB528']:
                    df = pd.read_table(rp_table.payload.get("url"), encoding='iso-8859-1')
                    return df
                elif rp_table.payload.get('compressionAlgorithm') is None and self.marketplace.marketplace_id in ['A1VC38T7YXB528']:
                    df = pd.read_table(rp_table.payload.get("url"), encoding='Shift-JIS')
                    return df
            elif reportId_info.payload.get("processingStatus") in [ProcessingStatus.CANCELLED, ProcessingStatus.FATAL]:
                try:
                    print(reportId_info)
                    reportDocumentId = reportId_info.payload.get("reportDocumentId")
                    rp_table = report.get_report_document(reportDocumentId=reportDocumentId, download=True)
                    print("取消或失败", rp_table)
                    return pd.DataFrame()
                except Exception as e:
                    print(e)
                    return pd.DataFrame()
            time.sleep(15)
            print("please wait...")
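
    # Typical report round-trip used by the methods below (sketch, matching the
    # calls made in GET_FLAT_FILE_OPEN_LISTINGS_DATA):
    #   para = {"reportType": ReportType.GET_MERCHANT_LISTINGS_ALL_DATA}
    #   reportid = self.create_report(**para)
    #   df = self.decompression(reportid)   # blocks, polling every 15s until DONE / CANCELLED / FATAL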
    def decompression2(self, reportId):  # after create_report - JSON variant: poll the report id, then download and parse the document
        report = Reports(credentials=self.credentials, marketplace=self.marketplace)
        while True:
            reportId_info = report.get_report(reportId=reportId)
            # print(reportId_info.payload)
            print("please wait...")
            if reportId_info.payload.get("processingStatus") == ProcessingStatus.DONE:
                reportDocumentId = reportId_info.payload.get("reportDocumentId")
                rp_table = report.get_report_document(reportDocumentId=reportDocumentId, download=False)
                print(rp_table)
                if rp_table.payload.get('compressionAlgorithm') is not None and self.marketplace.marketplace_id not in ['A1VC38T7YXB528']:
                    # df = pd.read_json(path_or_buf=rp_table.payload['url'], compression={"method": 'gzip'}, encoding='iso-8859-1')
                    response = request.urlopen(rp_table.payload['url'])
                    response.encoding = 'iso-8859-1'
                    df = json.loads(response.read().decode())
                    # pd.json_normalize()
                    return df
                elif rp_table.payload.get('compressionAlgorithm') is not None and self.marketplace.marketplace_id in ['A1VC38T7YXB528']:
                    df = pd.read_json(path_or_buf=rp_table.payload['url'], compression={"method": 'gzip'},
                                      encoding='Shift-JIS')
                    # df.columns =
                    return df
                elif rp_table.payload.get('compressionAlgorithm') is None and self.marketplace.marketplace_id not in ['A1VC38T7YXB528']:
                    df = pd.read_json(path_or_buf=rp_table.payload.get("url"), encoding='iso-8859-1')
                    return df
                elif rp_table.payload.get('compressionAlgorithm') is None and self.marketplace.marketplace_id in ['A1VC38T7YXB528']:
                    df = pd.read_json(path_or_buf=rp_table.payload.get("url"), encoding='Shift-JIS')
                    return df
            elif reportId_info.payload.get("processingStatus") in [ProcessingStatus.CANCELLED, ProcessingStatus.FATAL]:
                print(reportId_info)
                reportDocumentId = reportId_info.payload.get("reportDocumentId")
                rp_table = report.get_report_document(reportDocumentId=reportDocumentId, download=True)
                print("取消或失败", rp_table)
                return pd.DataFrame()
            time.sleep(15)
            print("please wait...")
    # def GET_MERCHANT_LISTINGS_ALL_DATA(self, limit=None):  # not used
    #     start = time.time()
    #     para = {"reportType": ReportType.GET_MERCHANT_LISTINGS_ALL_DATA}
    #     reportid = self.create_report(**para)
    #     decom_df = self.decompression(reportid)
    #     print("connecting to database")
    #     conn = self.LocalHost_Auth()
    #     print("connected")
    #     cursor = conn.cursor()
    #     timezone = "UTC"  # pytz.timezone(self.timezone)
    #     bondary_date = (datetime.now()).strftime("%Y-%m-%d")  # + timedelta(days=-28)
    #     cursor.execute(f"""select * from amz_sp_api.productInfo where (mainImageUrl is not null and mainImageUrl not in ('', ' ')) and
    #                    (`seller-sku` not in ('',' ') and `seller-sku` is not null) and
    #                    `updateTime`>='{bondary_date}'""")  # `seller-sku`,`updateTime`,`mainImageUrl`
    #     col = [i[0] for i in cursor.description]
    #     query_rel = cursor.fetchall()
    #
    #     if len(query_rel) != 0:
    #         print(query_rel[0])
    #         df = pd.DataFrame(query_rel, columns=col)
    #         listingid = df['listing-id'].to_numpy().tolist()
    #         decom_df = decom_df.query("`listing-id` not in @listingid")
    #         print("row count: ", len(decom_df))
    #         # print(f"delete * from amz_sp_api.productInfo where `listing-id` not in {tuple(listingid)}")
    #
    #         # conn.commit()
    #
    #     if len(decom_df) == 0:
    #         return "Done"
    #
    #     if limit != None:
    #         decom_df = decom_df.iloc[:limit, :]
    #     print("getting mainImageInfo...")
    #     rowcount = 0
    #     while rowcount < len(decom_df):
    #         df_insert = decom_df.copy()
    #         df_insert = df_insert.iloc[rowcount:rowcount + 200, :]
    #
    #         df_insert = self.data_deal(df_insert)
    #         list_df = df_insert.to_numpy().tolist()
    #
    #         # print(list(conn.query("select * from amz_sp_api.orderReport")))
    #         sql = f"""
    #         insert into amz_sp_api.productInfo
    #         values (%s,%s,%s,%s,%s,%s,%s, %s,%s,%s,%s,%s,%s,%s, %s,%s,%s,%s,%s,%s,%s, %s,%s,%s,%s,%s,%s,%s,%s)
    #         """  # ok
    #         # print(sql)
    #         conn = self.LocalHost_Auth()
    #         cursor = conn.cursor()
    #         try:
    #             conn.begin()
    #             cursor.executemany(sql, list_df)
    #             conn.commit()
    #             print("inserting...")
    #             insert_listingid = df_insert['listing-id'].to_numpy().tolist()
    #             cursor.execute(f"delete from amz_sp_api.productInfo where `listing-id` not in {tuple(insert_listingid)} and `updateTime`<'{bondary_date}'")
    #             conn.commit()
    #             rowcount += 200
    #         except Exception as e:
    #             conn.rollback()
    #             print(e)
    #             try:
    #                 conn = self.LocalHost_Auth()
    #                 cursor = conn.cursor()
    #                 conn.begin()
    #                 cursor.executemany(sql, list_df)
    #                 conn.commit()
    #                 insert_listingid = df_insert['listing-id'].to_numpy().tolist()
    #                 cursor.execute(f"delete from amz_sp_api.productInfo where `listing-id` not in {tuple(insert_listingid)} and `updateTime`<'{bondary_date}'")
    #                 conn.commit()
    #             except Exception as e:
    #                 conn.rollback()
    #                 print(e)
    #                 break
    #         # break
    #     conn.close()
    #     print("all done")
    #     end = time.time()
    #     print("duration:", end - start)
    #     return decom_df
    # def data_deal(self, decom_df, seller_id):  # deprecated
    #     decom_df['mainImageUrl'] = decom_df['seller-sku'].map(lambda x: self.get_mainImage_url(x))
    #     url_columns = [i for i in decom_df.columns if "url" in i.lower()]
    #     if len(url_columns) > 0:
    #         decom_df[url_columns] = decom_df[url_columns].astype("string")
    #     asin_columns = [i for i in decom_df.columns if 'asin' in i.lower()]
    #     if len(asin_columns) > 0:
    #         decom_df[asin_columns] = decom_df[asin_columns].astype("string")
    #     if 'pending-quantity' in decom_df.columns:
    #         decom_df['pending-quantity'] = decom_df['pending-quantity'].map(
    #             lambda x: 0 if pd.isna(x) or np.isinf(x) else x).astype("int32")
    #     deletecolumns = [i for i in decom_df.columns if 'zshop' in i.lower()]
    #     decom_df.drop(columns=deletecolumns, inplace=True)
    #     if 'quantity' in decom_df.columns:
    #         decom_df['quantity'] = decom_df['quantity'].map(lambda x: 0 if pd.isna(x) or np.isinf(x) else x).astype(
    #             "int32")
    #     decom_df['opendate_date'] = decom_df['open-date'].map(lambda x: self.datetime_deal(x))
    #     if 'add-delete' in decom_df.columns:
    #         decom_df['add-delete'] = decom_df['add-delete'].astype('string', errors='ignore')
    #     if 'will-ship-internationally' in decom_df.columns:
    #         decom_df['will-ship-internationally'] = decom_df['will-ship-internationally'].astype('string',
    #                                                                                              errors='ignore')
    #     if 'expedited-shipping' in decom_df.columns:
    #         decom_df['expedited-shipping'] = decom_df['expedited-shipping'].astype('string', errors='ignore')
    #     decom_df['updateTime'] = datetime.now()
    #     decom_df['timezone'] = "UTC"
    #     decom_df['seller_id'] = seller_id
    #     #
    #     decom_df['item-description'] = decom_df['item-description'].str.slice(0, 500)
    #     decom_df[decom_df.select_dtypes(float).columns] = decom_df[decom_df.select_dtypes(float).columns].fillna(0.0)
    #     decom_df[decom_df.select_dtypes(int).columns] = decom_df[decom_df.select_dtypes(int).columns].fillna(0)
    #     decom_df[decom_df.select_dtypes(datetime).columns] = decom_df[decom_df.select_dtypes(datetime).columns].astype(
    #         'string')
    #     decom_df.fillna('', inplace=True)
    #     # print(decom_df.info())
    #     return decom_df
    def fba_inventorySQL(self, conn, seller_id):
        cursor = conn.cursor()
        # run the query: latest FBA row per (asin, sku, seller, marketplace, country)
        cursor.execute(f""" select asin,sku,seller_id,marketplace_id,country_code,quantity,fulfillment_channel
                        from
                        (select asin,sku,seller_id,marketplace_id,country_code,quantity,fulfillment_channel,
                        ROW_NUMBER() over(partition by asin,sku,seller_id,marketplace_id,country_code order by update_datetime desc) as row_
                        from asj_ads.seller_listings) as t
                        where row_=1 and seller_id='{seller_id}' and marketplace_id='{self.marketplace.marketplace_id}'
                        and fulfillment_channel='FBA'
                        """)  # ,quantity
        query_ = cursor.fetchall()
        col_name = [i[0] for i in cursor.description]
        df_datatable = pd.DataFrame(query_, columns=col_name)
        df_datatable.columns = ['asin_', 'sku_', 'seller_id_', 'marketplace_id_', 'country_code_',
                                'afn-fulfillable-quantity']
        return df_datatable
    def get_fba_neccessary_segment(self, conn, seller_id):
        para = {"reportType": ReportType.GET_FBA_MYI_UNSUPPRESSED_INVENTORY_DATA}
        reportid = self.create_report(**para)
        df = self.decompression(reportid)
        if len(df) == 0:
            return self.fba_inventorySQL(conn, seller_id)
            # pd.DataFrame()
        df['seller_id'] = seller_id
        df['marketplace_id'] = self.marketplace.marketplace_id
        df['country_code'] = str(self.marketplace)[-2:]
        df_rel = df.query("condition=='New'")
        df_rel = df_rel.groupby(['asin', 'sku', 'seller_id', 'marketplace_id', 'country_code']).agg(
            {'afn-fulfillable-quantity': sum}).reset_index()
        df_rel.columns = ['asin_', 'sku_', 'seller_id_', 'marketplace_id_', 'country_code_', 'afn-fulfillable-quantity']
        print(f"{seller_id}_{str(self.marketplace)[-2:]}_FBA_Inventory_OK")
        return df_rel
    def GET_FBA_MYI_UNSUPPRESSED_INVENTORY_DATA(self, refresh_token, conn=None, seller_id=None, days=-1, **kwargs):  # FBA inventory information
        try:
            return self.get_fba_neccessary_segment(conn, seller_id)
        except Exception as e:
            print(e)
            try:
                time.sleep(15)
                return self.get_fba_neccessary_segment(conn, seller_id)
            except Exception as e:
                print(e)
                df_rel = pd.DataFrame(columns=['asin_', 'sku_', 'seller_id_', 'marketplace_id_', 'country_code_', 'afn-fulfillable-quantity'])
                return df_rel
    def GET_FLAT_FILE_OPEN_LISTINGS_DATA(self, refresh_token, conn=None, seller_id=None, days=-1):  # to table asj_ads.seller_listings - listing info, including FBM inventory
        para = {"reportType": ReportType.GET_MERCHANT_LISTINGS_ALL_DATA}
        reportid = self.create_report(**para)
        df = self.decompression(reportid)
        if len(df) > 0:
            if self.marketplace.marketplace_id == 'A1VC38T7YXB528':  # the JP marketplace returns different column names
                df.columns = ['item-name', 'listing-id', 'seller-sku', 'price', 'quantity', 'open-date', 'product-id-type', 'item-description',
                              'item-condition', 'overseas shipping', 'fast shipping', 'asin1', 'stock_number', 'fulfillment-channel', 'merchant-shipping-group', 'status']
            df['seller_id'] = seller_id
            df['marketplace_id'] = self.marketplace.marketplace_id
            df['country_code'] = str(self.marketplace)[-2:]
            if 'fulfilment-channel' in df.columns:  # if the single-"l" 'fulfilment-channel' column exists, copy it into the double-"l" 'fulfillment-channel' column
                df['fulfillment-channel'] = df['fulfilment-channel'].copy()
            # map Amazon-fulfilled channels to FBA
            df['fulfillment_channel'] = df['fulfillment-channel'].map(lambda x: "FBA" if not pd.isna(x) and len(x) > 0 and str(x)[1:4] in "AMAZON" else x)
            # map DEFAULT to FBM
            df['fulfillment_channel'] = df['fulfillment_channel'].map(lambda x: "FBM" if not pd.isna(x) and len(x) > 0 and str(x)[1:4] in "DEFAULT" else x)
            if 'asin1' not in df.columns:  # add an asin1 column if it is missing
                df['asin1'] = ''
            if 'product-id' not in df.columns:  # add a product-id column if it is missing
                df['product-id'] = ''
            # missing-value handling
            # df['quantity'] = df['quantity'].fillna(0).astype('int64', errors='ignore')
            df['quantity'] = df['quantity'].map(lambda x: 0 if pd.isna(x) == True else int(x))
            # normalize the inventory quantity dtype
            df['quantity'] = df['quantity'].astype('int64')
            # fill NA values
            df[['listing-id', 'seller_id', 'asin1', 'seller-sku', 'country_code', 'marketplace_id', 'fulfillment_channel', 'status', 'product-id']] = df[['listing-id', 'seller_id', 'asin1', 'seller-sku', 'country_code', 'marketplace_id', 'fulfillment_channel', 'status', 'product-id']].fillna('').astype('string', errors='ignore')
            # fill NA prices with 0
            df['price'] = df['price'].fillna(0.0).astype('float64', errors='ignore')
            df.fillna('', inplace=True)
            # datetime handling
            df['opendate'] = df['open-date'].map(lambda x: self.datetime_deal(x))
            df['update_datetime'] = datetime.now(pytz.UTC).date()
            # columns to keep
            origin_columns = ['listing-id', 'seller_id',
                              'asin1', 'seller-sku', 'title', 'image_link', 'country_code',
                              'marketplace_id', 'quantity', 'fulfillment_channel',
                              'price', 'opendate', 'status', 'update_datetime', 'product-id', 'product-id-type', 'modifier'
                              ]
            # connect to the database
            conn = SpApiRequest.Data_auth()
            cursor = conn.cursor()
            # select the distinct (product_id, asin) pairs where both asin and product_id are non-empty
            cursor.execute("""select product_id,asin from (select * from asj_ads.seller_listings where asin is not null
            and asin<>'' and product_id is not null and product_id <>'') t1 group by product_id,asin""")
            query_ = cursor.fetchall()
            col_name = [i[0] for i in cursor.description]
            df_datatable = pd.DataFrame(query_, columns=col_name)
            # merge: left side is the freshly downloaded report, right side is the database query result
            merged_df = df.merge(df_datatable[['product_id', 'asin']], how='left', left_on='product-id', right_on='product_id')

            # helper: resolve the asin for a row
            def func_(asin, asin1, product_id, cred, market_p, seller_id, sku):
                if 'B0' in str(product_id)[:3]:
                    return str(product_id)
                if (pd.isna(asin1) or asin1 == '') and (pd.isna(asin) == False and asin != ''):
                    if 'B0' in asin[:3]:
                        return asin
                elif (pd.isna(asin1) == False and asin1 != ''):
                    if 'B0' in asin1[:3]:
                        return asin1
                listingClient = ListingsItems(credentials=cred, marketplace=market_p)
                try:
                    r1 = listingClient.get_listings_item(sellerId=seller_id, sku=sku)
                    print(r1.payload)
                    asin = r1.payload.get("summaries")[0].get("asin")
                    return asin
                except Exception as e:
                    print("获取图片url过程错误重试, 错误message: ", e)  # retrying after an error while fetching the listing item
                    time.sleep(3)
                    r1 = listingClient.get_listings_item(sellerId=seller_id, sku=sku)
                    print(r1.payload)
                    asin = r1.payload.get("summaries")[0].get("asin")
                    return asin

            # apply the helper to fill in the asin
            merged_df['asin1'] = merged_df.apply(lambda x: func_(x['asin'], x['asin1'], x['product-id'], self.credentials, self.marketplace, seller_id, x['seller-sku']), axis=1)  # x['asin'] if pd.isna(x['asin1']) or x['asin1']=='' else x['asin1']
            merged_df['image_link'] = ''
            # temporarily store the refresh_token in the title column; it is needed later when fetching asin details
            merged_df['title'] = refresh_token
            merged_df['modifier'] = ''
            # fill NA values
            merged_df.fillna('', inplace=True)
            df1 = merged_df.copy()
            # df1.to_csv(...)  # debug dump after the first merge
            # fetch FBA inventory data
            df_fbaInventory = self.GET_FBA_MYI_UNSUPPRESSED_INVENTORY_DATA(refresh_token, conn, seller_id, days)
            # merge the FBA inventory data
            if len(df_fbaInventory) > 0:
                df1 = df1.merge(df_fbaInventory, how='left', left_on=['asin1', 'seller-sku', 'seller_id', 'marketplace_id', 'country_code'], right_on=['asin_', 'sku_', 'seller_id_', 'marketplace_id_', 'country_code_'])
                # prefer the FBA quantity when the row is fulfilled by FBA or an FBA quantity is present
                df1['quantity'] = df1.apply(lambda x: x['afn-fulfillable-quantity'] if x['fulfillment_channel'] == 'FBA' or pd.isna(x['afn-fulfillable-quantity']) == False else x['quantity'], axis=1)
                df1['quantity'] = df1['quantity'].map(lambda x: 0 if pd.isna(x) else int(x))
                df1['quantity'] = df1['quantity'].fillna(0)
            # df1.to_csv(...)  # debug dump after the second merge
            # work out which rows changed
            update_df = self.update_data(df1, seller_id, str(self.marketplace)[-2:], conn)
            if len(update_df) == 0:
                return '无更新数据插入'
            # update_df['country_code'] = update_df['country_code'].map({"GB": "UK"})
            conn = SpApiRequest.Data_auth()
            cursor = conn.cursor()
            # insert the changed rows
            try:
                insertsql = """insert into
                asj_ads.seller_listings(listing_id,seller_id,asin,sku,title,image_link,country_code,marketplace_id,quantity,
                fulfillment_channel,price,launch_datetime,status,update_datetime,product_id,product_id_type,modifier)
                values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"""
                conn.begin()
                cursor.executemany(insertsql, tuple(update_df[origin_columns].to_numpy().tolist()))
                conn.commit()
                print("插入完成")
                delete_sql = f"""delete from asj_ads.seller_listings where update_datetime<'{(datetime.today() + timedelta(days=-3)).date().isoformat()}'"""
                cursor.execute(delete_sql)
                conn.commit()
                return '插入完成'
            except Exception as e:
                print("插入错误:", e)
                conn.rollback()
                return '出错回滚'
        return ''
    # def get_listing_info(self, sku, seller_id):  # deprecated
    #     listingClient = ListingsItems(credentials=self.credentials, marketplace=self.marketplace)
    #     try:
    #         r1 = listingClient.get_listings_item(sellerId=seller_id, sku=sku)
    #         # print(r1.payload)
    #         json_content = r1.payload.get("summaries")[0]
    #         item_name = json_content.get("itemName")
    #         item_name = '###' if item_name == None else item_name
    #         img = json_content.get("mainImage")
    #         img_url = '###' if img is None else img.get("link")
    #         return str(img_url) + "-----" + str(item_name)
    #     except Exception as e:
    #         try:
    #             print("retrying after an error while fetching the image url, message: ", e)
    #             time.sleep(3)
    #             r1 = listingClient.get_listings_item(sellerId=seller_id, sku=sku)
    #             print(r1.payload)
    #             json_content = r1.payload.get("summaries")[0]
    #
    #             item_name = json_content.get("itemName")
    #             item_name = '###' if item_name == None else item_name
    #             img = json_content.get("mainImage")
    #             img_url = '###' if img is None else img.get("link")
    #             return str(img_url) + "-----" + str(item_name)
    #         except Exception as e:
    #             print(e)
    #             return "###-----###"
    def datetime_deal(self, timestring):  # used in GET_FLAT_FILE_OPEN_LISTINGS_DATA - time parsing helper
        timezone_ = {"AEST": "Australia/Sydney", "AEDT": "Australia/Sydney", "PST": "America/Los_Angeles",
                     "PDT": "America/Los_Angeles", "CST": "America/Chicago", "CDT": "America/Chicago",
                     "MET": "MET", "MEST": "MET", "BST": "Europe/London", "GMT": "GMT", "CET": "CET",
                     "CEST": "CET", "JST": "Asia/Tokyo", "BRT": "America/Sao_Paulo"}
        date_list = str.split(timestring, sep=' ')
        if len(date_list) < 3:
            try:
                return datetime.strptime(date_list[0], "%Y-%m-%d")
            except Exception:
                try:
                    return datetime.strptime(date_list[0], "%Y/%m/%d")
                except Exception:
                    try:
                        return datetime.strptime(date_list[0], "%d/%m/%Y")
                    except Exception as e:
                        print(e)
                        return datetime(1999, 12, 31, 0, 0, 0)
        try:
            time_date = datetime.strptime(date_list[0] + date_list[1], "%Y-%m-%d%H:%M:%S")
            timezone = pytz.timezone(timezone_[date_list[2]])
            time_ = timezone.localize(time_date)
            return time_.astimezone(pytz.UTC)
        except Exception:
            try:
                time_date = datetime.strptime(date_list[0] + date_list[1], "%d/%m/%Y%H:%M:%S")
                timezone = pytz.timezone(timezone_[date_list[2]])
                time_ = timezone.localize(time_date)
                return time_.astimezone(pytz.UTC)
            except Exception:
                try:
                    time_date = datetime.strptime(date_list[0] + date_list[1], "%Y/%m/%d%H:%M:%S")
                    timezone = pytz.timezone(timezone_[date_list[2]])
                    time_ = timezone.localize(time_date)
                    return time_.astimezone(pytz.UTC)
                except Exception as e1:
                    print(e1)
                    return datetime(1999, 12, 31, 0, 0, 0)
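
    # Illustrative results (derived from the parsing branches above):
    #   datetime_deal("2023-05-06")              -> datetime(2023, 5, 6, 0, 0)
    #   datetime_deal("2023-01-06 01:02:03 PST") -> 2023-01-06 09:02:03+00:00 (UTC)
    #   unparseable input                        -> datetime(1999, 12, 31, 0, 0) sentinel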
    def update_data(self, df, seller_id, country_code, conn):  # used in GET_FLAT_FILE_OPEN_LISTINGS_DATA - compare new data against the database
        conn = SpApiRequest.Data_auth()
        cursor = conn.cursor()
        columns = ['listing-id', 'seller_id',
                   'asin1', 'seller-sku', 'title', 'image_link', 'country_code',
                   'marketplace_id', 'quantity', 'fulfillment_channel',
                   'price', 'opendate', 'status', 'update_datetime', 'product-id', 'product-id-type', 'modifier'
                   ]
        if country_code == 'GB':
            country_code = "UK"
            df['country_code'] = "UK"
        df_data = pd.DataFrame(columns=columns)
        delete_list = []
        marketplace_id = self.marketplace.marketplace_id
        try:
            cursor.execute(f"""select * from
            asj_ads.seller_listings where seller_id='{seller_id}' and marketplace_id='{marketplace_id}'""")
            col = [i[0] for i in cursor.description]
            query_rel = cursor.fetchall()
            df_rel = pd.DataFrame(query_rel, columns=col)
            # df_rel.to_csv(...)  # debug dump of the database rows
            # data already in the database
            df_rel['quantity'] = df_rel['quantity'].fillna(0).astype('int64')
            df_rel['price'] = df_rel['price'].fillna(0.0).astype('float64')
            df_rel['product_id_type'] = df_rel['product_id_type'].astype('int64')
            # freshly downloaded data
            df['update_datetime'] = df['update_datetime'].astype('datetime64[ns]')
            df['quantity'] = df['quantity'].fillna(0).astype('int64')
            df['price'] = df['price'].fillna(0.0).astype('float64')
            row = 0
            while row < len(df):
                temp_df = df.iloc[row, :]
                temp_d = df.iloc[row:row + 1, :]
                listing_id = temp_df['listing-id']
                asin = temp_df['asin1']
                sku = temp_df['seller-sku']
                quantity = temp_df['quantity']
                fulfillment_channel = temp_df['fulfillment_channel']
                price = temp_df['price']
                product_id = temp_df['product-id']
                title = temp_df['title']
                imageurl = temp_df['image_link']
                modifier = temp_df['modifier']
                temp = df_rel.query("""listing_id==@listing_id and asin==@asin and sku==@sku and quantity==@quantity and fulfillment_channel==@fulfillment_channel and price==@price and product_id==@product_id and country_code==@country_code and seller_id==@seller_id and title==@title and image_link==@imageurl and modifier==@modifier""")
                # print("rows needing attention (possible anomaly):", len(temp), temp.to_numpy().tolist()) if len(temp) > 1 else 1
                if len(temp) > 1:
                    # temp = temp.head(1).to_numpy().tolist()
                    df_data = pd.concat((df_data, temp_d), ignore_index=True)  # df_data.append(temp_df, ignore_index=True)
                    delete_list.append((seller_id, marketplace_id, sku, listing_id, product_id))
                    # print(len(temp))
                elif len(temp) == 0:
                    df_data = pd.concat((df_data, temp_d), ignore_index=True)
                    delete_list.append((seller_id, marketplace_id, sku, listing_id, product_id))
                row += 1
            print("判断不同数据条数", len(delete_list))  # number of rows that differ
            print("预计更新数据条数", len(df_data))  # number of rows expected to be updated
            try:
                # print(tuple(delete_list))
                if len(delete_list) > 0:
                    query = f"""delete from asj_ads.seller_listings
                    where (seller_id,marketplace_id,sku,listing_id,product_id) in %s"""  # where (seller_id,country_code) in %s"""
                    cursor.execute(query, (delete_list,))
                    conn.commit()
                    # print(delete_list)
                    print("进行中...")
            except Exception as e:
                print(e)
                conn.rollback()
            return df_data
        except Exception as e:
            print("错误:", e)
            return df
    # def GET_FLAT_FILE_RETURNS_DATA_BY_RETURN_DATE(self, seller_id, days=-2):  # not used - returns report, comes back empty
    #     shopReportday = (datetime.now() + timedelta(days=days)).strftime("%Y-%m-%d")
    #     # print(shopReportday)
    #     para = {"reportType": ReportType.GET_SELLER_FEEDBACK_DATA,
    #             "dataStartTime": shopReportday, "dataEndTime": shopReportday,
    #             }
    #     reportid = self.create_report(**para)  # {"ShowSalesChannel":"true"}
    #     decom_df = self.decompression(reportid)
    #     print(decom_df)
    #     print(decom_df.columns)
    def GET_SALES_AND_TRAFFIC_REPORT(self, refresh_token, seller_id, days=-2, **kwargs):  # to table asj_ads.SalesAndTrafficByAsin - sales and traffic by ASIN
        # (previous signature: , level: Literal["PARENT", "CHILD", "SKU"] = "PARENT")
        level = "PARENT" if not kwargs.get("level") else kwargs.get("level")  # default to PARENT when level is missing or empty
        countryCode = kwargs.get("countryCode")
        # print(level)
        shopReportday = (datetime.now() + timedelta(days=days)).strftime("%Y-%m-%d")
        print(shopReportday, countryCode, seller_id)
        try:
            conn = self.Data_auth()
            cursor = conn.cursor()
        except Exception:
            time.sleep(5)
            conn = self.Data_auth()
            cursor = conn.cursor()
        if level == 'SKU':
            query_judge = f"""select count(*) from asj_ads.SalesAndTrafficByAsin where seller_id='{seller_id}' and data_date='{shopReportday}' and countryCode='{countryCode}' and childAsin is not Null and sku is not Null"""
        elif level == 'CHILD':
            query_judge = f"""select count(*) from asj_ads.SalesAndTrafficByAsin where seller_id='{seller_id}' and data_date='{shopReportday}' and countryCode='{countryCode}' and sku is null and childAsin is not null"""
        elif level == 'PARENT':
            query_judge = f"""select count(*) from asj_ads.SalesAndTrafficByAsin where seller_id='{seller_id}' and data_date='{shopReportday}' and countryCode='{countryCode}' and sku is null and childAsin is null"""
        else:
            return ''
        print(query_judge)
        cursor.execute(query_judge)
        rel = cursor.fetchall()
        # print()
        if rel[0][0] != 0:
            return '已存在'
        # print(shopReportday)
        para = {"reportType": ReportType.GET_SALES_AND_TRAFFIC_REPORT,
                "dataStartTime": shopReportday, "dataEndTime": shopReportday,
                "reportOptions": {"dateGranularity": "DAY", "asinGranularity": level}
                }
        reportid = self.create_report(**para)  # {"ShowSalesChannel":"true"}
        decom_df = self.decompression(reportid)
        # print(decom_df.columns[0])
        data_rel = self.sales_traffic_datadeal(decom_df.columns[0], seller_id, countryCode)
        try:
            conn = self.Data_auth()
            cursor = conn.cursor()
        except Exception:
            time.sleep(5)
            conn = self.Data_auth()
            cursor = conn.cursor()
        # print(list(conn.query("select * from amz_sp_api.orderReport")))
        sql = f"""
        insert into asj_ads.SalesAndTrafficByAsin(data_date,data_marketpalceId,parent_asin,
        childAsin,sku,sBA_unitsOrdered,sBA_unitsOrderedB2B,sBA_amount,
        currencyCode,totalOrderItems,totalOrderItemsB2B,tBA_browserSessions,
        tBA_browserSessionsB2B,tBA_mobileAppSessions,tBA_mobileAppSessionsB2B,
        tBA_sessions,tBA_sessionsB2B,tBA_browserSessionPercentage,
        tBA_browserSessionPercentageB2B,tBA_mobileAppSessionPercentage,
        tBA_mobileAppSessionPercentageB2B,tBA_sessionPercentage,
        tBA_sessionPercentageB2B,tBA_browserPageViews,tBA_browserPageViewsB2B,
        tBA_mobileAppPageViews,tBA_mobileAppPageViewsB2B,tBA_pageViews,
        tBA_pageViewsB2B,tBA_browserPageViewsPercentage,tBA_browserPageViewsPercentageB2B,
        tBA_mobileAppPageViewsPercentage,tBA_mobileAppPageViewsPercentageB2B,
        tBA_pageViewsPercentage,tBA_pageViewsPercentageB2B,tBA_buyBoxPercentage,
        tBA_buyBoxPercentageB2B,tBA_unitSessionPercentage,tBA_unitSessionPercentageB2B,seller_id,countryCode)
        values (%s,%s,%s,%s,%s,%s,%s, %s,%s,%s,%s,%s,%s,%s, %s,%s,%s,%s,%s,%s,%s, %s,%s,%s,%s,%s,%s,%s, %s,%s,%s,%s,%s,%s,%s, %s,%s,%s,%s,%s,%s)
        """  # ok
        try:
            # TODO
            conn.begin()
            cursor.executemany(sql, data_rel)
            conn.commit()
            print("插入完成")
            conn.close()
            time.sleep(1)
        except Exception as e:
            conn.rollback()
            print(e)
    def sales_traffic_datadeal(self, data, seller_id, countryCode):  # used in GET_SALES_AND_TRAFFIC_REPORT
        data = eval(data)  # the report body arrives as a JSON-like string in the first column name
        if len(data['salesAndTrafficByAsin']) == 0:
            return []
        data_list = []
        data_date = data["reportSpecification"]["dataEndTime"]
        data_marketpalceId = data["reportSpecification"]["marketplaceIds"][0]
        # print(data_marketpalceId)
        for single_item in data["salesAndTrafficByAsin"]:
            # print(single_item)
            parent_asin = single_item.get("parentAsin")
            childAsin = single_item.get("childAsin")
            sku = single_item.get("sku")
            salesByAsin = single_item.get("salesByAsin")
            # if salesByAsin is not None:
            sBA_unitsOrdered = salesByAsin.get("unitsOrdered") if salesByAsin is not None else ''
            sBA_unitsOrderedB2B = salesByAsin.get("unitsOrderedB2B") if salesByAsin is not None else ''
            orderedProductSales = salesByAsin.get("orderedProductSales")
            sBA_amount = orderedProductSales.get("amount") if orderedProductSales is not None else ''
            currencyCode = orderedProductSales.get("currencyCode") if orderedProductSales is not None else ''
            orderedProductSalesB2B = salesByAsin.get("orderedProductSalesB2B") if salesByAsin is not None else None
            # if orderedProductSalesB2B is not None:
            oPS_amount = orderedProductSalesB2B.get("amount") if orderedProductSalesB2B is not None else ''
            totalOrderItems = salesByAsin.get("totalOrderItems") if salesByAsin is not None else ''
            totalOrderItemsB2B = salesByAsin.get("totalOrderItemsB2B") if salesByAsin is not None else ''
            trafficByAsin = single_item.get("trafficByAsin")
            # if trafficByAsin is not None:
            tBA_browserSessions = trafficByAsin.get("browserSessions") if trafficByAsin is not None else ''
            tBA_browserSessionsB2B = trafficByAsin.get("browserSessionsB2B") if trafficByAsin is not None else ''
            tBA_mobileAppSessions = trafficByAsin.get("mobileAppSessions") if trafficByAsin is not None else ''
            tBA_mobileAppSessionsB2B = trafficByAsin.get("mobileAppSessionsB2B") if trafficByAsin is not None else ''
            tBA_sessions = trafficByAsin.get("sessions") if trafficByAsin is not None else ''
            tBA_sessionsB2B = trafficByAsin.get("sessionsB2B") if trafficByAsin is not None else ''
            tBA_browserSessionPercentage = trafficByAsin.get("browserSessionPercentage") if trafficByAsin is not None else ''
            tBA_browserSessionPercentageB2B = trafficByAsin.get("browserSessionPercentageB2B") if trafficByAsin is not None else ''
            tBA_mobileAppSessionPercentage = trafficByAsin.get("mobileAppSessionPercentage") if trafficByAsin is not None else ''
            tBA_mobileAppSessionPercentageB2B = trafficByAsin.get("mobileAppSessionPercentageB2B") if trafficByAsin is not None else ''
            tBA_sessionPercentage = trafficByAsin.get("sessionPercentage") if trafficByAsin is not None else ''
            tBA_sessionPercentageB2B = trafficByAsin.get("sessionPercentageB2B") if trafficByAsin is not None else ''
            tBA_browserPageViews = trafficByAsin.get("browserPageViews") if trafficByAsin is not None else ''
            tBA_browserPageViewsB2B = trafficByAsin.get("browserPageViewsB2B") if trafficByAsin is not None else ''
            tBA_mobileAppPageViews = trafficByAsin.get("mobileAppPageViews") if trafficByAsin is not None else ''
            tBA_mobileAppPageViewsB2B = trafficByAsin.get("mobileAppPageViewsB2B") if trafficByAsin is not None else ''
            tBA_pageViews = trafficByAsin.get("pageViews") if trafficByAsin is not None else ''
            tBA_pageViewsB2B = trafficByAsin.get("pageViewsB2B") if trafficByAsin is not None else ''
            tBA_browserPageViewsPercentage = trafficByAsin.get("browserPageViewsPercentage") if trafficByAsin is not None else ''
            tBA_browserPageViewsPercentageB2B = trafficByAsin.get("browserPageViewsPercentageB2B") if trafficByAsin is not None else ''
            tBA_mobileAppPageViewsPercentage = trafficByAsin.get("mobileAppPageViewsPercentage") if trafficByAsin is not None else ''
            tBA_mobileAppPageViewsPercentageB2B = trafficByAsin.get("mobileAppPageViewsPercentageB2B") if trafficByAsin is not None else ''
            tBA_pageViewsPercentage = trafficByAsin.get("pageViewsPercentage") if trafficByAsin is not None else ''
            tBA_pageViewsPercentageB2B = trafficByAsin.get("pageViewsPercentageB2B") if trafficByAsin is not None else ''
            tBA_buyBoxPercentage = trafficByAsin.get("buyBoxPercentage") if trafficByAsin is not None else ''
            tBA_buyBoxPercentageB2B = trafficByAsin.get("buyBoxPercentageB2B") if trafficByAsin is not None else ''
            tBA_unitSessionPercentage = trafficByAsin.get("unitSessionPercentage") if trafficByAsin is not None else ''
            tBA_unitSessionPercentageB2B = trafficByAsin.get("unitSessionPercentageB2B") if trafficByAsin is not None else ''
            data_list.append([data_date, data_marketpalceId, parent_asin,
                              childAsin, sku, sBA_unitsOrdered, sBA_unitsOrderedB2B, sBA_amount,
                              currencyCode, totalOrderItems, totalOrderItemsB2B, tBA_browserSessions,
                              tBA_browserSessionsB2B, tBA_mobileAppSessions, tBA_mobileAppSessionsB2B,
                              tBA_sessions, tBA_sessionsB2B, tBA_browserSessionPercentage,
                              tBA_browserSessionPercentageB2B, tBA_mobileAppSessionPercentage,
                              tBA_mobileAppSessionPercentageB2B, tBA_sessionPercentage,
                              tBA_sessionPercentageB2B, tBA_browserPageViews, tBA_browserPageViewsB2B,
                              tBA_mobileAppPageViews, tBA_mobileAppPageViewsB2B, tBA_pageViews,
                              tBA_pageViewsB2B, tBA_browserPageViewsPercentage, tBA_browserPageViewsPercentageB2B,
                              tBA_mobileAppPageViewsPercentage, tBA_mobileAppPageViewsPercentageB2B,
                              tBA_pageViewsPercentage, tBA_pageViewsPercentageB2B, tBA_buyBoxPercentage,
                              tBA_buyBoxPercentageB2B, tBA_unitSessionPercentage, tBA_unitSessionPercentageB2B, seller_id, countryCode
                              ])
        # print(data_list)
        return data_list
    def GET_FLAT_FILE_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL(self, refresh_token, seller_id, days=-1, **kwargs):  # to table asj_ads.orderReport_ - fetch the order report
        countryCode = kwargs.get("countryCode")
        shopReportday = (datetime.now() + timedelta(days=days)).strftime("%Y-%m-%d")
        print(shopReportday)
        try:
            conn = self.Data_auth()
            cursor = conn.cursor()
        except Exception:
            time.sleep(5)
            conn = self.Data_auth()
            cursor = conn.cursor()
        query_judge = f"""select count(*) from asj_ads.orderReport_ where ReportDate='{shopReportday}' and country_code='{countryCode}'"""
        print(query_judge)
        cursor.execute(query_judge)
        rel = cursor.fetchall()
        # print()
        if rel[0][0] != 0:
            print("已存在")
            return '已存在'
        para = {"reportType": ReportType.GET_FLAT_FILE_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL,
                "dataStartTime": shopReportday, "dataEndTime": shopReportday,
                "reportOptions": {"ShowSalesChannel": "true"}}
        reportid = self.create_report(**para)  # {"ShowSalesChannel":"true"}
        decom_df = self.decompression(reportid)
        decom_df[decom_df.select_dtypes(float).columns] = decom_df[decom_df.select_dtypes(float).columns].fillna(0.0)
        decom_df[decom_df.select_dtypes(int).columns] = decom_df[decom_df.select_dtypes(int).columns].fillna(0)
        decom_df[decom_df.select_dtypes(datetime).columns] = decom_df[decom_df.select_dtypes(datetime).columns].astype(
            'string')
        if "purchase-order-number" in decom_df.columns:
            decom_df['purchase-order-number'] = decom_df['purchase-order-number'].astype("string")
        decom_df.fillna('', inplace=True)
        # decom_df.to_csv('order.csv')
        decom_df["ReportDate"] = parse(shopReportday)
        # decom_df['timezone'] = decom_df["purchase-date"].map(lambda x: parse(x).tzname()).fillna(method='bfill')
        decom_df['timezone'] = "UTC"
        print("==========================================================")
        decom_df[["purchase-date", "last-updated-date"]] = decom_df[["purchase-date", "last-updated-date"]].applymap(
            lambda x: self.timeDeal(x) if pd.isna(x) == False or x != None else x)
        if 'is-business-order' not in decom_df.columns:
            decom_df['is-business-order'] = None
        if 'purchase-order-number' not in decom_df.columns:
            decom_df['purchase-order-number'] = '-'
        if 'price-designation' not in decom_df.columns:
            decom_df['price-designation'] = '-'
        decom_df['seller_id'] = seller_id
        country_code = str(self.marketplace)[-2:]
        if country_code == 'GB':
            country_code = "UK"
            # decom_df['country_code'] = "UK"
        decom_df['country_code'] = country_code
        decom_df['insert_time'] = datetime.now()
        # print(decom_df[])
        reserve_columns = ["amazon-order-id", "merchant-order-id", "purchase-date", "last-updated-date", "order-status",
                           "fulfillment-channel", "sales-channel", "order-channel", "ship-service-level",
                           "product-name",
                           "sku", "asin", "item-status", "quantity", "currency", "item-price", "item-tax",
                           "shipping-price",
                           "shipping-tax", "gift-wrap-price", "gift-wrap-tax", "item-promotion-discount",
                           "ship-promotion-discount", "ship-city", "ship-state", "ship-postal-code", "ship-country",
                           "promotion-ids", "is-business-order", "purchase-order-number", "price-designation",
                           "ReportDate",
                           "timezone", "seller_id", "country_code", 'insert_time'
                           ]
        list_df = decom_df[reserve_columns].to_numpy().tolist()
        try:
            conn = self.Data_auth()
            cursor = conn.cursor()
        except Exception:
            time.sleep(5)
            conn = self.Data_auth()
            cursor = conn.cursor()
        # print(list(conn.query("select * from amz_sp_api.orderReport")))
        sql = f"""
        insert into asj_ads.orderReport_(`amazon_order_id`, `merchant_order_id`, `purchase_date`, `last_updated_date`, `order_status`,
        `fulfillment_channel`, `sales_channel`, `order_channel`, `ship_service_level`,
        `product_name`,
        `sku`, `asin`, `item_status`, `quantity`, `currency`, `item_price`, `item_tax`,
        `shipping_price`,
        `shipping_tax`, `gift_wrap_price`, `gift_wrap_tax`, `item_promotion_discount`,
        `ship_promotion_discount`, `ship_city`, `ship_state`, `ship_postal_code`, `ship_country`,
        `promotion_ids`, `is_business_order`, `purchase_order_number`, `price_designation`,
        `ReportDate`,
        `timezone`, `seller_id`, `country_code`,`insert_time`)
        values (%s,%s,%s,%s,%s,%s,%s, %s,%s,%s,%s,%s,%s,%s, %s,%s,%s,%s,%s,%s,%s, %s,%s,%s,%s,%s,%s,%s, %s,%s,%s,%s,%s,%s,%s,%s)
        """  # ok
        try:
            conn.begin()
            cursor.executemany(sql, list_df)
            conn.commit()
            print("插入完成")
            conn.close()
            time.sleep(1)
        except Exception as e:
            conn.rollback()
            print(e)
    def timeDeal(self, orgTime):  # time handling
        orgTime = parse(orgTime)
        timezone = pytz.timezone("UTC")
        shopTime = orgTime.astimezone(timezone)
        shopTime_datetime = datetime(shopTime.year, shopTime.month, shopTime.day, shopTime.hour, shopTime.minute,
                                     shopTime.second)
        return shopTime_datetime
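
    # Illustrative: timeDeal("2023-01-02T10:30:00-05:00") converts to UTC and
    # drops the tzinfo, returning the naive datetime(2023, 1, 2, 15, 30, 0).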
    @classmethod
    def listing_infoTable(cls):  # to table asj_ads.Goods - Goods table, including rank, image, and parent/child relationship
        conn = SpApiRequest.Data_auth()
        cursor = conn.cursor()
        #
        cursor.execute(f"""select seller_id,country_code,asin,title from asj_ads.seller_listings where title is not null and title <>'' and (seller_id,country_code,asin) not in (select seller_id,countryCode,asin from asj_ads.Goods where update_time>='{datetime.today().date()}') group by seller_id,title,country_code,asin order by country_code desc""")
        query_ = cursor.fetchall()
        # print(query_)
        col_name = [i[0] for i in cursor.description]
        df_datatable = pd.DataFrame(query_, columns=col_name)
        count = 0
        distance = 50
        print(len(df_datatable))
        while count < len(df_datatable):
            print(f"进度:{round(count / len(df_datatable) * 100, 2)}%")  # progress
            df = df_datatable.iloc[count:count + distance, :]
            count = count + distance
            df['detail_info'] = df.apply(lambda x: cls.get_listing_info01(x['title'], x['country_code'], x['asin'], x['seller_id']), axis=1)
            detail_info_k = df['detail_info'].map(lambda x: list(x.keys())).to_numpy().tolist()
            detail_info = df['detail_info'].to_numpy().tolist()  # df['detail_info'].map(lambda x: list(x.values())).to_numpy().tolist()
            conn = SpApiRequest.Data_auth()
            print(count)
            SpApiRequest.Goods_insert(conn, detail_info, detail_info_k)
            if count % distance == 0:
                cursor = conn.cursor()
                cursor.execute(
                    f"""select seller_id,countryCode,asin from asj_ads.Goods where update_time>='{datetime.today().date()}'""")
                query_d = cursor.fetchall()
                # print(query_d)
                try:
                    # print(tuple(delete_list))
                    query = f"""delete from asj_ads.Goods where update_time<'{datetime.today().date()}'
                    and (seller_id,countryCode,asin) in {query_d}
                    """  # where (seller_id,country_code) in %s"""
                    cursor.execute(query)
                    conn.commit()
                    # print(delete_list)
                    # print("in progress...")
                except Exception as e:
                    print(e)
                    conn.rollback()
        print("Success")
        conn.close()
    @classmethod
    def Goods_drop_duplicates(cls):
        conn = SpApiRequest.Data_auth()
        cursor = conn.cursor()
        cursor.execute(
            f"""select seller_id,countryCode,asin as count_ from asj_ads.Goods group by seller_id,countryCode,asin having count(asin)>=2""")
        query_ = cursor.fetchall()
        print(len(query_))
        if len(query_) > 0:
            query_sql2 = cursor.execute(
                f"""select distinct main_image, productTypes, BigCat_rank, BigCat_title, SmallCat_rank, SmallCat_title, brandName, browseNode, itemName, IsParent, parent_asin, asin, countryCode, marketplace_id, seller_id, update_time from asj_ads.Goods where (seller_id,countryCode,asin) in {query_}""")
            query_2 = cursor.fetchall()
            try:
                query = f"""delete from asj_ads.Goods where (seller_id,countryCode,asin) in {query_}
                """  # where (seller_id,country_code) in %s"""
                cursor.execute(query)
                conn.commit()
            except Exception as e:
                print("错误过程1", e)
                conn.rollback()
            sql = """insert into
            asj_ads.Goods(main_image, productTypes, BigCat_rank, BigCat_title, SmallCat_rank, SmallCat_title, brandName, browseNode, itemName, IsParent, parent_asin, asin, countryCode, marketplace_id, seller_id, update_time)
            values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"""
            try:
                conn.begin()
                cursor.executemany(sql, query_2)
                conn.commit()
                print("插入完成")
                conn.close()
                time.sleep(1)
            except Exception as e:
                conn.rollback()
                print("错误过程2", e)
    @staticmethod
    def get_listing_info01(refresh_token, countryCode, asin, seller_id):  # used in listing_infoTable
        # print(refresh_token)
        aws_credentials = {
            'refresh_token': refresh_token,
            'lwa_app_id': 'amzn1.application-oa2-client.1f9d3d4747e14b22b4b598e54e6b922e',  # LWA credentials from the developer profile in Seller Central
            'lwa_client_secret': 'amzn1.oa2-cs.v1.13ebfa8ea7723b478215a2b61f4dbf5ca6f6927fa097c2dd0941a66d510aca7f',
            'aws_access_key': 'AKIARBAGHTGOZC7544GN',
            'aws_secret_key': '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',
            'role_arn': 'arn:aws:iam::070880041373:role/Amazon_SP_API_ROLE'
        }
        mak = {'AE': Marketplaces.AE, 'BE': Marketplaces.BE, 'DE': Marketplaces.DE,
               'PL': Marketplaces.PL, 'EG': Marketplaces.EG, 'ES': Marketplaces.ES,
               'FR': Marketplaces.FR, 'GB': Marketplaces.GB, 'IN': Marketplaces.IN,
               'IT': Marketplaces.IT, 'NL': Marketplaces.NL, 'SA': Marketplaces.SA,
               'SE': Marketplaces.SE, 'TR': Marketplaces.TR, 'UK': Marketplaces.UK,
               'AU': Marketplaces.AU, 'JP': Marketplaces.JP, 'SG': Marketplaces.SG,
               'US': Marketplaces.US,
               'BR': Marketplaces.BR, 'CA': Marketplaces.CA, 'MX': Marketplaces.MX
               }
        cate_item = CatalogItems(credentials=aws_credentials, marketplace=mak[countryCode], version='2022-04-01')
        try:
            variations_info = SpApiRequest.variations_judge(cate_item, asin)
        except:
            time.sleep(2.5)
            variations_info = SpApiRequest.variations_judge(cate_item, asin)
        try:
            cate_item = CatalogItems(credentials=aws_credentials, marketplace=mak[countryCode], version='2020-12-01')
            detail_info = SpApiRequest.get_detail_cat(cate_item, asin, mak, countryCode)
        except:
            time.sleep(2.5)
            cate_item = CatalogItems(credentials=aws_credentials, marketplace=mak[countryCode], version='2020-12-01')
            detail_info = SpApiRequest.get_detail_cat(cate_item, asin, mak, countryCode)
        detail_info.update(variations_info)
        detail_info['asin'] = asin
        detail_info['countryCode'] = countryCode
        detail_info['marketplace_id'] = mak[countryCode].marketplace_id
        detail_info['seller_id'] = seller_id
        detail_info['update_time'] = datetime.now()
        return detail_info
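    # Usage sketch with placeholder identifiers (hypothetical seller; a real LWA refresh token is required):
    # info = SpApiRequest.get_listing_info01(refresh_token='<refresh_token>', countryCode='US',
    #                                        asin='B0B8CPHSL4', seller_id='<seller_id>')
    # print(info['IsParent'], info['parent_asin'], info['BigCat_rank'])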
    @staticmethod
    def variations_judge(cate_item, asin):  # used in listing_infoTable; determine whether the ASIN has a parent/child relationship
        def temp_func(cate_item, asin):
            variations = cate_item.get_catalog_item(asin=asin, **{"includedData": ['relationships']})  # 'variations',
            var_info = variations.payload
            # print(variations)
            IsParent = 'Y'
            parent_asin = ''
            try:
                relationships = var_info.get("relationships")[0]['relationships']
            except:
                relationships = []
            if len(relationships) > 0:
                variationType = relationships[0]['type']
                if relationships[0].get('parentAsins') is not None:
                    parent_asin = relationships[0]['parentAsins'][0]
                    IsParent = 'N'
                elif relationships[0].get('childAsins') is not None:
                    IsParent = 'Y'
                    parent_asin = asin
                else:
                    parent_asin = 'Erro_01'
            else:
                IsParent = 'SG'
                parent_asin = asin
            print(IsParent, 'parent asin:', parent_asin, 'child asin:', asin)
            return {"IsParent": IsParent, "parent_asin": parent_asin}
        try:
            return temp_func(cate_item, asin)
        except:
            try:
                time.sleep(12)
                return temp_func(cate_item, asin)
            except Exception as e:
                print("Error while determining parent/child ASIN:", e)
                return {"IsParent": 'Erro', "parent_asin": 'Erro'}
    @staticmethod
    def Goods_insert(conn, detail_info, detail_info_k):  # writes into the asj_ads.Goods and asj_ads.ProductRank tables
        # print(detail_info)
        df = pd.DataFrame(detail_info)
        # print(df.columns)
        try:
            cursor = conn.cursor()
        except:
            time.sleep(2.5)
            conn = SpApiRequest.Data_auth()
            cursor = conn.cursor()
        query_sql = "select * from asj_ads.Goods"
        cursor.execute(query_sql)
        col = [i[0] for i in cursor.description]
        query_rel = cursor.fetchall()
        df_query = pd.DataFrame(query_rel, columns=col)
        merge_df = df.merge(df_query, how='left', on=['asin', 'countryCode', 'marketplace_id', 'seller_id'], suffixes=['', '_right'])
        # Fall back to the previously stored values when the fresh lookup returned 'Erro'.
        merge_df['IsParent'] = merge_df.apply(lambda x: x['IsParent_right'] if x['IsParent'] == 'Erro' else x['IsParent'], axis=1)
        merge_df['parent_asin'] = merge_df.apply(lambda x: x['parent_asin_right'] if x['parent_asin'] == 'Erro' else x['parent_asin'], axis=1)
        merge_df = merge_df.query("IsParent!='Erro' and parent_asin!='Erro'")
        detail_info_value = merge_df[['main_image', 'productTypes', 'BigCat_rank', 'BigCat_title', 'SmallCat_rank',
                                      'SmallCat_title', 'brandName', 'browseNode', 'itemName', 'IsParent', 'parent_asin',
                                      'asin', 'countryCode', 'marketplace_id', 'seller_id', 'update_time']].to_numpy().tolist()
        try:
            insertsql = """insert into
            asj_ads.Goods(main_image, productTypes, BigCat_rank, BigCat_title, SmallCat_rank, SmallCat_title, brandName, browseNode, itemName, IsParent, parent_asin, asin, countryCode, marketplace_id, seller_id, update_time)
            values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"""
            conn.begin()
            cursor.executemany(insertsql, tuple(detail_info_value))
            conn.commit()
            print("Goods insert complete")
        except Exception as e:
            print("Goods insert error:", e)
            conn.rollback()
        sales_rankData = []
        for i in detail_info_value:
            tmp_list = []
            # Pick the rank-related columns out of each Goods row: BigCat/SmallCat rank & title,
            # brandName, itemName, asin, countryCode, marketplace_id, seller_id.
            for j in [2, 3, 4, 5, 6, 8, 11, 12, 13, 14]:
                tmp_list.extend([i[j]])
            tmp_list.extend([datetime.now(), datetime.utcnow()])
            sales_rankData.append(tmp_list)
        # print(sales_rankData, len(sales_rankData))
        try:
            insertsql = """insert into
            asj_ads.ProductRank(BigCat_rank, BigCat_title, SmallCat_rank, SmallCat_title, brandName, itemName, asin, countryCode, marketplace_id, seller_id, update_time, time_UTC)
            values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"""
            conn.begin()
            cursor.executemany(insertsql, tuple(sales_rankData))
            conn.commit()
            print("rank insert complete")
        except Exception as e:
            print("rank insert error:", e)
            conn.rollback()
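    # Usage sketch (assumes `conn` comes from Data_auth() and `detail_info` is a list of dicts shaped
    # like the get_listing_info01 return value; detail_info_k is the matching key list built by the caller):
    # conn = SpApiRequest.Data_auth()
    # SpApiRequest.Goods_insert(conn, detail_info, detail_info_k)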
    @staticmethod
    def get_detail_cat(cate_item, asin, mak, countryCode):  # used in listing_infoTable; fetch listing details (image, product type, ranks, summaries) from the catalog SP-API
        try:
            detail_info = cate_item.get_catalog_item(asin=asin, **{
                "includedData": ["images,productTypes,salesRanks,summaries"],
                "marketplaceIds": [str(mak[countryCode].marketplace_id)]})
            payload = detail_info.payload
            # print(payload)
            try:
                main_image = payload['images'][0]['images'][0]['link'] if len(payload['images']) > 0 else "#"
            except:
                main_image = '#-'
            try:
                productTypes = payload['productTypes'][0]['productType'] if len(payload['productTypes'][0]) > 0 else "#"
            except:
                productTypes = '#-'
            try:
                # print(payload['ranks'][0])
                if len(payload['ranks'][0]) > 0:
                    BigCat_rank = payload['ranks'][0]['ranks'][0]['rank']
                    BigCat_title = payload['ranks'][0]['ranks'][0]['title']
                    SmallCat_rank = payload['ranks'][0]['ranks'][1]['rank']
                    SmallCat_title = payload['ranks'][0]['ranks'][1]['title']
                else:
                    BigCat_rank, BigCat_title, SmallCat_rank, SmallCat_title = 0, '#', 0, '#'
            except:
                BigCat_rank, BigCat_title, SmallCat_rank, SmallCat_title = 0, '#-', 0, '#-'
            try:
                if len(payload['summaries'][0]) > 0:
                    brandName = payload['summaries'][0]['brandName']
                    browseNode = payload['summaries'][0]['browseNode']
                    itemName = payload['summaries'][0]['itemName']
                else:
                    brandName, browseNode, itemName = '#', '#', '#'
            except:
                brandName, browseNode, itemName = '#-', '#-', '#-'
            return {'main_image': main_image, 'productTypes': productTypes, 'BigCat_rank': BigCat_rank,
                    'BigCat_title': BigCat_title, 'SmallCat_rank': SmallCat_rank, 'SmallCat_title': SmallCat_title,
                    'brandName': brandName, 'browseNode': browseNode, 'itemName': itemName}
        except:
            return {'main_image': '', 'productTypes': '', 'BigCat_rank': 0,
                    'BigCat_title': '', 'SmallCat_rank': 0, 'SmallCat_title': '',
                    'brandName': '', 'browseNode': '', 'itemName': ''}
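    # Placeholder convention in get_detail_cat: '#' marks a field that was present but empty in the
    # payload, '#-' marks an exception while reading it, and '' means the whole catalog call failed.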
    def GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT(self, refresh_token, seller_id, days=-3, **kwargs):
        # Note: the month/day are hardcoded in the format strings, so only the year is derived from `days`.
        shopReportday = (datetime.now() + timedelta(days=days)).strftime("%Y-04-21")
        shopReportday_E = (datetime.now() + timedelta(days=days)).strftime("%Y-04-27")
        print(shopReportday)
        para = {"reportType": ReportType.GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT, "reportOptions": {"reportPeriod": "WEEK"},
                "dataStartTime": shopReportday, "dataEndTime": shopReportday_E}
        reportid = self.create_report(**para)
    def GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT_DAY(self, **kwargs):
        shopReportday = "2024-09-04"
        shopReportday_E = "2024-09-04"
        print(shopReportday)
        para = {"reportType": ReportType.GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT, "reportOptions": {"reportPeriod": "DAY"},
                "dataStartTime": shopReportday, "dataEndTime": shopReportday_E}
        reportid = self.create_report(**para)
    def BRAND_ANALYTICS_TEXT_deal(self, text):
        pass
    def GET_SELLER_FEEDBACK_DATA(self, refresh_token, seller_id, days, **a_kw):
        kwargs = a_kw
        country_code = str(self.marketplace)[-2:]
        if country_code == 'GB':
            country_code = "UK"
        insert_time = datetime.now()
        shopReportday = (datetime.now().date() + timedelta(days=days)).strftime("%Y-%m-%d")
        shopReportday_E = (datetime.now().date() + timedelta(days=days)).strftime("%Y-%m-%d")
        print(shopReportday)
        para = {"reportType": ReportType.GET_SELLER_FEEDBACK_DATA,  # alternatives: GET_FLAT_FILE_RETURNS_DATA_BY_RETURN_DATE | GET_FBA_FULFILLMENT_CUSTOMER_RETURNS_DATA
                "dataStartTime": shopReportday,
                "dataEndTime": shopReportday_E, "reportOptions": {"reportPeriod": "DAY"}}
        try:
            reportid = self.create_report(**para)
            decom_df = self.decompression(reportid)
        except:
            return pd.DataFrame()
        if len(decom_df) == 0:
            return reportid
        # Stringify numeric/datetime columns so the downstream ClickHouse insert only sees strings.
        decom_df[decom_df.select_dtypes(float).columns] = decom_df[decom_df.select_dtypes(float).columns].applymap(
            lambda x: str(x) if not pd.isna(x) else '')
        decom_df[decom_df.select_dtypes(int).columns] = decom_df[decom_df.select_dtypes(int).columns].applymap(
            lambda x: str(x) if not pd.isna(x) else '')
        decom_df[decom_df.select_dtypes(datetime).columns] = decom_df[
            decom_df.select_dtypes(datetime).columns].applymap(lambda x: str(x) if not pd.isna(x) else '')
        decom_df.fillna('', inplace=True)
        # decom_df.to_csv('order.csv')
        decom_df["ReportDate"] = parse(shopReportday)
        # decom_df['timezone'] = decom_df["purchase-date"].map(lambda x: parse(x).tzname()).fillna(method='bfill')
        decom_df['timezone'] = "UTC"
        decom_df['seller_id'] = seller_id
        decom_df['country_code'] = country_code
        decom_df['insert_time'] = datetime.now()
        df_feedback = decom_df.copy()
        df_feedback[["date", "rating", "comment", "order_id"]] = df_feedback[["Date", "Rating", "Comments", "Order ID"]]
        df_feedback['isFeedback'] = 1
        df_feedback['date'] = df_feedback['date'].map(lambda x: pd.to_datetime(x).date())
        df_feedback['seller_id'] = seller_id
        df_feedback['country_code'] = country_code
        client = get_client(host='3.93.43.158', port=8123, username='root',
                            password='6f0eyLuiVn3slzbGWpzI')
        try:
            client.insert_df(table='ams.performance', df=df_feedback[
                ["date", "rating", "comment", "order_id", "isFeedback", 'seller_id', 'country_code', 'insert_time']],
                             column_names=["date", "rating", "comment", "order_id", "isFeedback", 'seller_id',
                                           'country_code', 'insert_time'])
        except Exception as e:
            print(e)
        client.close()
        # print(decom_df.columns)
        # print(decom_df)
        return reportid
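    # Usage sketch (instance method; assumes aws_credentials, refresh_token and seller_id are set up
    # the same way get_allShops builds them, and the ClickHouse host hardcoded above is reachable):
    # sp_api = SpApiRequest(aws_credentials, Marketplaces.US)
    # report_id = sp_api.GET_SELLER_FEEDBACK_DATA(refresh_token, seller_id, days=-1)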
    def GET_FBA_FULFILLMENT_CUSTOMER_RETURNS_DATA(self, refresh_token, seller_id, days, **a_kw):
        kwargs = a_kw
        country_code = str(self.marketplace)[-2:]
        if country_code == 'GB':
            country_code = "UK"
        insert_time = datetime.now()
        shopReportday = (datetime.now().date() + timedelta(days=days)).strftime("%Y-%m-%d")
        shopReportday_E = (datetime.now().date() + timedelta(days=days)).strftime("%Y-%m-%d")
        # print(shopReportday)
        para = {"reportType": ReportType.GET_FBA_FULFILLMENT_CUSTOMER_RETURNS_DATA,  # alternatives: GET_SELLER_FEEDBACK_DATA | GET_FLAT_FILE_RETURNS_DATA_BY_RETURN_DATE
                "dataStartTime": shopReportday,
                "dataEndTime": shopReportday_E, "reportOptions": {"reportPeriod": "DAY"}}
        try:
            reportid = self.create_report(**para)
            decom_df = self.decompression(reportid)
        except:
            return pd.DataFrame()
        if len(decom_df) == 0:
            return reportid
        decom_df[decom_df.select_dtypes(float).columns] = decom_df[decom_df.select_dtypes(float).columns].applymap(
            lambda x: str(x) if not pd.isna(x) else '')
        decom_df[decom_df.select_dtypes(int).columns] = decom_df[decom_df.select_dtypes(int).columns].applymap(
            lambda x: str(x) if not pd.isna(x) else '')
        decom_df[decom_df.select_dtypes(datetime).columns] = decom_df[
            decom_df.select_dtypes(datetime).columns].applymap(lambda x: str(x) if not pd.isna(x) else '')
        decom_df.fillna('', inplace=True)
        # decom_df.to_csv('order.csv')
        decom_df["ReportDate"] = parse(shopReportday)
        # decom_df['timezone'] = decom_df["purchase-date"].map(lambda x: parse(x).tzname()).fillna(method='bfill')
        decom_df['timezone'] = "UTC"
        print("==========================================================")
        decom_df['seller_id'] = seller_id
        decom_df['country_code'] = country_code
        decom_df['insert_time'] = datetime.now()
        df_fbaR = decom_df.copy()
        df_fbaR[['date', 'order_id', 'product_name', 'detailed_disposition', 'comment']] = df_fbaR[
            ['return-date', 'order-id', 'product-name', 'detailed-disposition', 'customer-comments']]
        # source columns: return-date, order-id, sku, asin, fnsku, product-name, quantity, reason, detailed-disposition, customer-comments
        df_fbaR['isFeedback'] = 0
        df_fbaR['date'] = df_fbaR['date'].map(lambda x: pd.to_datetime(x).date())
        df_fbaR['comment'].fillna("", inplace=True)
        client = get_client(host='3.93.43.158', port=8123, username='root',
                            password='6f0eyLuiVn3slzbGWpzI')
        try:
            client.insert_df(table='ams.performance', df=df_fbaR[
                ['order_id', 'sku', 'asin', 'fnsku', 'detailed_disposition', 'product_name', 'isFeedback', 'date',
                 'comment', 'seller_id', 'country_code', 'insert_time']],
                             column_names=['order_id', 'sku', 'asin', 'fnsku', 'detailed_disposition', 'product_name',
                                           'isFeedback', 'date', 'comment', 'seller_id', 'country_code', 'insert_time'])
        except Exception as e:
            print(e)
        client.close()
        # print(decom_df.columns)
        # print(decom_df)
        return reportid
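    # Note: GET_SELLER_FEEDBACK_DATA and this FBA-returns report both land in ams.performance;
    # rows are distinguished by isFeedback (1 = seller feedback, 0 = FBA customer return).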
    def GET_FLAT_FILE_RETURNS_DATA_BY_RETURN_DATE(self, refresh_token, seller_id, days, **a_kw):
        kwargs = a_kw
        country_code = str(self.marketplace)[-2:]
        if country_code == 'GB':
            country_code = "UK"
        insert_time = datetime.now()
        shopReportday = (datetime.now() + timedelta(days=days)).strftime("%Y-%m-%d")
        shopReportday_E = (datetime.now() + timedelta(days=days)).strftime("%Y-%m-%d")
        para = {"reportType": ReportType.GET_FLAT_FILE_RETURNS_DATA_BY_RETURN_DATE,
                # alternatives: GET_SELLER_FEEDBACK_DATA | GET_FBA_FULFILLMENT_CUSTOMER_RETURNS_DATA
                "dataStartTime": shopReportday,
                "dataEndTime": shopReportday_E, "reportOptions": {"reportPeriod": "DAY"}}
        try:
            reportid = self.create_report(**para)
            decom_df = self.decompression(reportid)
        except:
            return pd.DataFrame()
        if len(decom_df) == 0:
            return reportid
        decom_df[decom_df.select_dtypes(float).columns] = decom_df[decom_df.select_dtypes(float).columns].applymap(lambda x: str(x) if not pd.isna(x) else '')
        decom_df[decom_df.select_dtypes(int).columns] = decom_df[decom_df.select_dtypes(int).columns].applymap(lambda x: str(x) if not pd.isna(x) else '')
        decom_df[decom_df.select_dtypes(datetime).columns] = decom_df[decom_df.select_dtypes(datetime).columns].applymap(lambda x: str(x) if not pd.isna(x) else '')
        decom_df.fillna('', inplace=True)
        # decom_df.to_csv('order.csv')
        decom_df["ReportDate"] = parse(shopReportday)
        # decom_df['timezone'] = decom_df["purchase-date"].map(lambda x: parse(x).tzname()).fillna(method='bfill')
        decom_df['timezone'] = "UTC"
        decom_df['seller_id'] = seller_id
        decom_df['country_code'] = country_code
        decom_df['insert_time'] = datetime.now()
        decom_df['Order date'] = decom_df['Order date'].map(lambda x: '' if pd.isna(x) or x == '' else parse(x))
        decom_df['Return request date'] = decom_df['Return request date'].map(lambda x: '' if pd.isna(x) or x == '' else parse(x))
        client = get_client(host='3.93.43.158', port=8123, username='root',
                            password='6f0eyLuiVn3slzbGWpzI')
        # print(decom_df.dtypes)
        # decom_df['SafeT claim reimbursement amount'] = decom_df['SafeT claim reimbursement amount'].map(str)
        # decom_df['Refunded Amount'] = decom_df['Refunded Amount'].map(str)
        # decom_df['Label cost'] = decom_df['Label cost'].astype('string')
        print("=" * 100)
        try:
            client.insert_df('ams.return_record', df=decom_df[['Order ID', 'Order date', 'Return request date',
                                                               'Return request status', 'Amazon RMA ID', 'Merchant RMA ID',
                                                               'Label type', 'Label cost', 'Currency code', 'Return carrier',
                                                               'Tracking ID', 'Label to be paid by', 'A-to-Z Claim', 'Is prime',
                                                               'ASIN', 'Merchant SKU', 'Item Name', 'Return quantity', 'Return Reason',
                                                               'In policy', 'Return type', 'Resolution', 'Invoice number',
                                                               'Return delivery date', 'Order Amount', 'Order quantity',
                                                               'SafeT Action reason', 'SafeT claim id', 'SafeT claim state',
                                                               'SafeT claim creation time', 'SafeT claim reimbursement amount',
                                                               'Refunded Amount', 'ReportDate', 'timezone', 'seller_id',
                                                               'country_code', 'insert_time']],
                             column_names=['Order_ID', 'Order_date', 'Return_request_date', 'Return_request_status',
                                           'Amazon_RMA_ID', 'Merchant_RMA_ID', 'Label_type', 'Label_cost', 'Currency_code',
                                           'Return_carrier', 'Tracking_ID', 'Label_to_be_paid_by', 'A-to-Z_Claim', 'Is_prime',
                                           'ASIN', 'Merchant_SKU', 'Item_Name', 'Return_quantity', 'Return_Reason',
                                           'In_policy', 'Return_type', 'Resolution', 'Invoice_number', 'Return_delivery_date',
                                           'Order_Amount', 'Order_quantity', 'SafeT_Action_reason', 'SafeT_claim_id',
                                           'SafeT_claim_state', 'SafeT_claim_creation_time', 'SafeT_claim_reimbursement_amount',
                                           'Refunded_Amount', 'ReportDate', 'timezone', 'seller_id', 'country_code', 'insert_time'])
        except Exception as e:
            print(e)
        client.close()
        return reportid
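    # The flat-file report uses column names with spaces ('Order ID', 'Return request date', ...);
    # the ClickHouse insert above maps them in order onto the underscore-separated names in column_names.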
    # Superseded earlier draft of GET_FLAT_FILE_RETURNS_DATA_BY_RETURN_DATE, kept commented out:
    # def GET_FLAT_FILE_RETURNS_DATA_BY_RETURN_DATE(self, refresh_token, seller_id, days=-1, **kwargs):
    #     countryCode = None if kwargs.get("countryCode") == None else kwargs.get("countryCode")
    #     shopReportday = (datetime.now() + timedelta(days=days)).strftime("%Y-%m-%d")
    #     print(shopReportday)
    #     para = {"reportType": ReportType.GET_FLAT_FILE_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL,
    #             "dataStartTime": shopReportday, "dataEndTime": shopReportday,
    #             "reportOptions": {"ShowSalesChannel": "true"}}
    #     reportid = self.create_report(**para)  # {"ShowSalesChannel":"true"}
    #     decom_df = self.decompression(reportid)
    #     decom_df['seller_id'] = seller_id
    #     country_code = str(self.marketplace)[-2:]
    #     if country_code == 'GB':
    #         country_code = "UK"
    #     # decom_df['country_code'] = "UK"
    #     decom_df['country_code'] = country_code
    #     decom_df['insert_time'] = datetime.now()
    #     shopReportday = "2024-11-04"
    #     shopReportday_E = "2024-11-04"
    #     print(shopReportday)
    #     para = {"reportType": ReportType.GET_FLAT_FILE_RETURNS_DATA_BY_RETURN_DATE,
    #             # alternatives: GET_FBA_FULFILLMENT_CUSTOMER_RETURNS_DATA
    #             "dataStartTime": shopReportday,
    #             "dataEndTime": shopReportday_E, "reportOptions": {"reportPeriod": "DAY"}}
    #     reportid = self.create_report(**para)
    #     print(reportid)
    #     return reportid
    @staticmethod
    def data_judge_secondTry(refresh_token, sp_api, data_type, seller_id, auth_conn, days=-1, **kwargs):  # main retry wrapper: retries data_judge up to two more times
        a_kw = kwargs
        try:
            SpApiRequest.data_judge(refresh_token, sp_api, data_type, seller_id, auth_conn, days=days, **a_kw)
        except Exception as e:
            try:
                if e.args[0][0]['code'] == 'InvalidInput':
                    return ''
                print('first retry...')
                time.sleep(15)
                SpApiRequest.data_judge(refresh_token, sp_api, data_type, seller_id, auth_conn, days=days, **a_kw)
            except Exception as e:
                print(e)
                print('second retry...')
                time.sleep(20)
                SpApiRequest.data_judge(refresh_token, sp_api, data_type, seller_id, auth_conn, days=days, **a_kw)
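    # Retry behaviour: one immediate attempt plus up to two delayed retries (15 s, then 20 s);
    # an 'InvalidInput' error code short-circuits and returns '' instead of retrying.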
    @staticmethod
    def data_judge(refresh_token, sp_api, data_type, seller_id, auth_conn, days=-1, **kwargs):  # dispatch: select which report-fetching method to run for the given data_type
        a_kw = kwargs
        if data_type == "GET_FLAT_FILE_OPEN_LISTINGS_DATA":
            return sp_api.GET_FLAT_FILE_OPEN_LISTINGS_DATA(refresh_token, auth_conn, seller_id, days)
        elif data_type == "GET_FLAT_FILE_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL":
            # for day_ in range(31, 1):
            #     sp_api.GET_FLAT_FILE_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL(seller_id, days=day_ * -1)
            return sp_api.GET_FLAT_FILE_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL(refresh_token, seller_id, days, **a_kw)
        # elif data_type == "GET_FLAT_FILE_RETURNS_DATA_BY_RETURN_DATE":
        #     return sp_api.GET_FLAT_FILE_RETURNS_DATA_BY_RETURN_DATE(refresh_token, seller_id, days)
        elif data_type == "GET_SALES_AND_TRAFFIC_REPORT":
            return sp_api.GET_SALES_AND_TRAFFIC_REPORT(refresh_token, seller_id, days, **a_kw)
        elif data_type == "GET_FBA_MYI_UNSUPPRESSED_INVENTORY_DATA":
            return sp_api.GET_FBA_MYI_UNSUPPRESSED_INVENTORY_DATA(refresh_token, auth_conn, seller_id, days, **a_kw)
        elif data_type == "GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT":
            return sp_api.GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT(refresh_token, seller_id, days, **a_kw)
        elif data_type == "GET_SELLER_FEEDBACK_DATA":
            return sp_api.GET_SELLER_FEEDBACK_DATA(refresh_token, seller_id, days, **a_kw)
        elif data_type == "GET_FBA_FULFILLMENT_CUSTOMER_RETURNS_DATA":
            return sp_api.GET_FBA_FULFILLMENT_CUSTOMER_RETURNS_DATA(refresh_token, seller_id, days, **a_kw)
        elif data_type == "GET_FLAT_FILE_RETURNS_DATA_BY_RETURN_DATE":
            return sp_api.GET_FLAT_FILE_RETURNS_DATA_BY_RETURN_DATE(refresh_token, seller_id, days, **a_kw)
        else:
            return ""
    @classmethod
    def get_allShops(cls, data_type: Literal["GET_FLAT_FILE_OPEN_LISTINGS_DATA", "GET_FLAT_FILE_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL"], days=-1, **kwargs):  # main entry point: authorize each shop and fetch the requested report for all of its marketplaces
        df = cls.auth_info()
        # Put the AM-ZOSI-US shop first, then the remaining shops.
        zosi = df.query("account_name=='AM-ZOSI-US'")['refresh_token'].to_numpy().tolist()
        # print(zosi)
        refreshtoken_list = df.query("account_name!='AM-ZOSI-US'")['refresh_token'].to_numpy().tolist()
        zosi.extend(refreshtoken_list)
        refreshtoken_list = zosi.copy()
        print(len(refreshtoken_list))
        a_kw = kwargs
        for refresh_token in refreshtoken_list:
            aws_credentials = {
                'refresh_token': refresh_token,
                'lwa_app_id': 'amzn1.application-oa2-client.1f9d3d4747e14b22b4b598e54e6b922e',  # LWA credentials from the developer profile in Seller Central
                'lwa_client_secret': 'amzn1.oa2-cs.v1.13ebfa8ea7723b478215a2b61f4dbf5ca6f6927fa097c2dd0941a66d510aca7f',
                'aws_access_key': 'AKIARBAGHTGOZC7544GN',
                'aws_secret_key': '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',
                'role_arn': 'arn:aws:iam::070880041373:role/Amazon_SP_API_ROLE'
            }
            single_info = df.query("refresh_token==@refresh_token")
            region_circle = single_info['region'].values[0]
            seller_id = single_info['selling_partner_id'].values[0]
            account_name = single_info['account_name'].values[0]
            if region_circle == 'NA':
                for marketplace in [Marketplaces.US, Marketplaces.BR, Marketplaces.CA, Marketplaces.MX]:
                    sp_api = SpApiRequest(aws_credentials, marketplace)
                    a_kw['countryCode'] = str(marketplace)[-2:]
                    try:
                        print("refresh_token:", refresh_token, 'data_type:', data_type, 'seller_id:', seller_id,
                              'marketplace:', marketplace.marketplace_id, 'country_code:', a_kw['countryCode'], sep='\n')
                        auth_conn = SpApiRequest.Token_auth()
                        print("CHECK>>>>>>>>>>>>>>>>>>>>>>>>>")
                        cls.data_judge_secondTry(refresh_token, sp_api, data_type, seller_id, auth_conn, days, **a_kw)
                        # sp_api.GET_FLAT_FILE_OPEN_LISTINGS_DATA(auth_conn, seller_id)
                    except Exception as e:
                        print(e)
                    time.sleep(3.5)
            elif region_circle == 'EU':
                for marketplace in [Marketplaces.DE, Marketplaces.AE, Marketplaces.BE, Marketplaces.PL,
                                    Marketplaces.EG, Marketplaces.ES, Marketplaces.GB, Marketplaces.IN, Marketplaces.IT,
                                    Marketplaces.NL, Marketplaces.SA, Marketplaces.SE, Marketplaces.TR, Marketplaces.UK,
                                    Marketplaces.FR]:
                    sp_api = SpApiRequest(aws_credentials, marketplace)
                    a_kw['countryCode'] = str(marketplace)[-2:]
                    try:
                        auth_conn = SpApiRequest.Token_auth()
                        cls.data_judge_secondTry(refresh_token, sp_api, data_type, seller_id, auth_conn, days, **a_kw)
                        # sp_api.GET_FLAT_FILE_OPEN_LISTINGS_DATA(auth_conn, seller_id)
                    except Exception as e:
                        print(e)
                    time.sleep(3.5)
            else:
                # region_circle not in ['NA', 'EU']: a single-marketplace region such as AU/JP/SG
                auth_conn = SpApiRequest.Token_auth()
                print(region_circle)
                marketplace = eval(f'Marketplaces.{region_circle}')
                sp_api = SpApiRequest(aws_credentials, marketplace)
                a_kw['countryCode'] = str(marketplace)[-2:]
                cls.data_judge_secondTry(refresh_token, sp_api, data_type, seller_id, auth_conn, days, **a_kw)
                # sp_api.GET_FLAT_FILE_OPEN_LISTINGS_DATA(auth_conn, seller_id)
    def price_tracker(self, asin):
        base_product = Products(credentials=self.credentials, marketplace=Marketplaces.US)
        return base_product.get_product_pricing_for_asins(asin)
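    # Usage sketch (the US marketplace is hardcoded in price_tracker; assumes aws_credentials as above):
    # sp_api = SpApiRequest(aws_credentials, Marketplaces.US)
    # print(sp_api.price_tracker(['B00L3W2QJ2']))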
if __name__ == '__main__':
    for days in range(-35, -2):
        SpApiRequest.get_allShops("GET_FLAT_FILE_RETURNS_DATA_BY_RETURN_DATE", days=days, **{})
    # SpApiRequest.listing_infoTable()
    # rel = SpApiRequest.get_catelog(account_name='AM-ZOSI-US', country=Marketplaces.US, asin='B0B8CPHSL4')
    # print(rel)
    # SpApiRequest
    ### TEST
    # df = SpApiRequest.auth_info()
    # print(df)
    # zosi = df.query("account_name=='AM-ZOSI-US'")['refresh_token'].to_numpy().tolist()
    # # print(zosi)
    # refreshtoken_list = df.query("account_name!='AM-ZOSI-US'")['refresh_token'].to_numpy().tolist()
    # zosi.extend(refreshtoken_list)
    # refreshtoken_list = zosi.copy()
    # aws_credentials = {
    #     'refresh_token': refreshtoken_list[0],
    #     'lwa_app_id': 'amzn1.application-oa2-client.1f9d3d4747e14b22b4b598e54e6b922e',  # LWA credentials from the developer profile in Seller Central
    #     'lwa_client_secret': 'amzn1.oa2-cs.v1.13ebfa8ea7723b478215a2b61f4dbf5ca6f6927fa097c2dd0941a66d510aca7f',
    #     'aws_access_key': 'AKIARBAGHTGOZC7544GN',
    #     'aws_secret_key': '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',
    #     'role_arn': 'arn:aws:iam::070880041373:role/Amazon_SP_API_ROLE'
    # }
    # sp_api = SpApiRequest(aws_credentials, Marketplaces.US)
    # id = sp_api.GET_SELLER_FEEDBACK_DATA()
    # print(id)
    # df = sp_api.decompression(str(id))
    # print(df)
    # df.to_csv('GET_SELLER_FEEDBACK_DATA.csv')
    #### END
    # print(sp_api.price_tracker(['B00L3W2QJ2']))
    # shopReportday = "2024-08-25"
    # shopReportday_E = "2024-08-31"
    # print(shopReportday)
    # # para = {"reportType": ReportType.GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT, "reportOptions": {"reportPeriod": "WEEK"},
    # #         "dataStartTime": shopReportday, "dataEndTime": shopReportday_E}
    # # report = Reports(credentials=aws, marketplace=self.marketplace)
    # report = Reports(credentials=aws_credentials, marketplace=Marketplaces.US)
    # rel = report.create_report(
    #     reportType=ReportType.GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT, marketplaceIds=['ATVPDKIKX0DER'],
    #     reportOptions={"reportPeriod": "WEEK"}, dataStartTime=shopReportday, dataEndTime=shopReportday_E
    # )
    # reportId = rel.payload.get("reportId")
    #
    # while True:
    #     reportId_info = report.get_report(reportId=reportId)
    #     # print(reportId_info.payload)
    #     print("please wait...", reportId_info.payload.get("processingStatus"))
    #     # print(reportId_info)
    #     if reportId_info.payload.get("processingStatus") == ProcessingStatus.DONE:
    #         print(reportId_info.payload)
    #         reportDocumentId = reportId_info.payload.get("reportDocumentId")
    #         rp_table = report.get_report_document(reportDocumentId=reportDocumentId, download=False)
    #         print(rp_table)
    #     elif reportId_info.payload.get("processingStatus") == ProcessingStatus.FATAL:
    #         reportDocumentId = reportId_info.payload.get("reportDocumentId")
    #         rp_table = report.get_report_document(reportDocumentId=reportDocumentId, download=False)
    #         import requests
    #         # requests.get(rp_table['payload']['url'])
    #
    #         df = pd.read_table(filepath_or_buffer=rp_table.payload['url'], compression={"method": 'gzip'},
    #                            encoding='iso-8859-1')
    #         # df = pandas.read_csv(rp_table['payload']['url'])
    #         print(df.to_dict(orient='records'))
    #         break