Browse Source

feat(all): 0628

wengao 11 months ago
parent
commit
6b19f76893

+ 4 - 1
sync_amz_data/public/amz_ad_client.py

@@ -628,7 +628,10 @@ class SBClient(BaseClient):
 
     def get_keywords(self,**param):
         url_path = "/sb/keywords"
-        return self._request(url_path, method="GET", params=param)
+        headers = {
+            'Accept': "application/vnd.sbkeyword.v3.2+json"
+        }
+        return self._request(url_path, method="GET", params=param, headers=headers)
 
     def get_keyword(self,keywordid):
         url_path = f'/sb/keywords/{keywordid}'

+ 1 - 0
sync_amz_data/tasks/datainsert/SB/mysql_datainsert_sbkeyword_v3.py

@@ -84,6 +84,7 @@ class SbKeyword:
 
     def dataconvert(self):
         df = self.get_sbkeywords_data()
+        df.to_csv('D:/code/test/sbkeywords.csv', index=False)
         col = ['keywordId', 'adGroupId', 'campaignId', 'keywordText',
                'nativeLanguageKeyword', 'matchType', 'state', 'bid']
         old = pd.DataFrame(data=[], columns=col)

+ 0 - 2
sync_amz_data/tasks/datainsert/SP/mysql_datainsert_sp_targetsbid_recommendations.py

@@ -133,8 +133,6 @@ class SpTargetsBidRecommendations:
             temlist.append(tem)
             time.sleep(1)
         return temlist
-        
-        return tem
 
 
 if __name__ == '__main__':

+ 0 - 1
sync_amz_data/tasks/datainsert/SP/mysql_datainsert_spgroup.py

@@ -74,7 +74,6 @@ class SpGroup:
         list_group = tem.iter_adGroups(**{"includeExtendedDataFields": True})
         df_group = pd.json_normalize(list(list_group))
         return df_group
-    #----------------------------
 
     def get_keyword_adgroupid_list(self):
         heads = self.heads

+ 9 - 10
sync_amz_data/tasks/datainsert/pl_update.py

@@ -1,19 +1,17 @@
-from sync_amz_data.DataTransform.Data_ETL import SP_ETL
+from urllib.parse import urljoin
 
 import requests
-from urllib.parse import urljoin
-from sync_amz_data.public.amz_ad_client import SPClient
+
 from sync_amz_data.settings import AWS_LWA_CLIENT
-import pandas as pd
-import json
 from sync_amz_data.tasks.datainsert.wg import LADS
 
+
 class RateLimitError(Exception):
     def __init__(self, retry_after: str = None):
         self.retry_after = retry_after
 
 
-def request(url_path: str, method: str = "GET", head: dict = None, params: dict = None, body: dict = None, AD = LADS):
+def request(url_path: str, method: str = "GET", head: dict = None, params: dict = None, body: dict = None, AD=LADS):
     ADS = AD
     resp = requests.session().request(
         method=method,
@@ -52,10 +50,10 @@ class ProductLineDetail:
         tem = request(url_path=url_path, head=heads, params=params)
         if tem.get('data') is not None:
             _ = tem.get('data')
-            out = _[0].get('refresh_token')
+            retoken = _[0].get('refresh_token')
         else:
-            out = None
-        return out
+            retoken = None
+        return retoken
     
     def updata_create(self):
         heads = self.heads
@@ -63,7 +61,8 @@ class ProductLineDetail:
         params = {'profileId': self.profile_id}
         tem = request(url_path=url_path, head=heads, method="GET", params=params)
         return tem
-    
+
+
 if __name__ == '__main__':
     a = ProductLineDetail(profile_id="3006125408623189")
     # out = a.get_sptargets_data()

+ 1 - 0
sync_amz_data/tasks/datainsert/wg.py

@@ -1,2 +1,3 @@
 # LADS = "http://192.168.1.19:8001/"
+# LADS = "http://127.0.0.1:8000/"
 LADS = "https://ads.vzzon.com"

+ 70 - 0
sync_amz_data/tasks/report_manage_data_load.py

@@ -0,0 +1,70 @@
+import pandas as pd
+import json
+import requests
+from urllib.parse import urljoin
+
+from sync_amz_data.tasks.utils import MAPING
+from sync_amz_data.tasks.datainsert.wg import LADS
+
+class RateLimitError(Exception):
+    def __init__(self, retry_after: str = None):
+        self.retry_after = retry_after
+
+
+def request(url_path: str, method: str = "GET", head: dict = None, params: dict = None, body: dict = None, AD = LADS):
+    ADS = AD
+    resp = requests.session().request(
+        method=method,
+        url=urljoin(ADS, url_path),
+        headers=head,
+        params=params,
+        json=body,
+    )
+    if resp.status_code == 429:
+        raise RateLimitError(resp.headers.get("Retry-After"))
+    if resp.status_code >= 400:
+        raise Exception(resp.text)
+    return resp.json()
+
+
+class ReportManageDataLoad:
+    def __init__(self, file_path, sheet_name):
+        # self.file_path = 'D:/code/test/20240422.xlsx'
+        # self.sheet_name = '2024年03月份销售额'
+        self.maping = MAPING
+        self.heads = {'X-Token': "da4ab6bc5cbf1dfa"}
+        self.upcreate_url_path = ""
+        self.file_path = file_path
+        self.sheet_name = sheet_name
+
+
+    def data_load(self):
+        df = pd.read_excel(self.file_path, sheet_name=self.sheet_name)
+        task_df = df[['平台编号', '平台名称', '运营', '国家', '品牌', '回款币种', '回款/余额币种']].copy()
+        task_df['运营'] = "文高"
+        task_df.rename(
+            columns={'平台编号': 'platformNumber', '平台名称': 'platformName', '运营': 'user', '国家': 'country',
+                     '品牌': 'brandName', '回款币种': 'currencyCode', '回款/余额币种': 'currencyCodePlatform'},
+            inplace=True)
+        task_df.currencyCode = task_df.currencyCode.map(self.maping)
+        task_df = task_df.dropna()
+        user_list = list(set(task_df.user.to_list()))
+        json_str = task_df.to_json(orient='records', date_format='iso')
+        out_data = json.loads(json_str)
+        out = {'user_list': user_list, 'out_data': out_data}
+        return out
+
+    def updata_create(self):
+        body = self.data_load().get('out_data')
+        heads = self.heads
+        url_path = self.upcreate_url_path
+        tem = request(url_path=url_path, head=heads, body=body, method="POST")
+        return tem
+
+
+
+
+# Read the data using pandas' read_excel function
+
+
+

+ 2 - 0
sync_amz_data/tasks/utils.py

@@ -0,0 +1,2 @@
+MAPING = {'加元': 'CAD', '加币': 'CAD', '日元': 'JPY', '林吉特': 'MYR', '欧元': 'EUR', '澳元': 'AUD',
+          '美元': 'USD', '英镑': 'GBP', '菲律宾比索': 'PHP'}

+ 32 - 19
wg_alldatainsert.py

@@ -1,29 +1,31 @@
-from sync_amz_data.tasks.datainsert.mysql_datainsert_portfolios import Portfolios
-from sync_amz_data.tasks.datainsert.SP.mysql_datainsert_spcampaign import SpCampaign
-from sync_amz_data.tasks.datainsert.SP.mysql_datainsert_spgroup import SpGroup
-from sync_amz_data.tasks.datainsert.SP.mysql_datainsert_spkeyword import SpKeyword
-from sync_amz_data.tasks.datainsert.SP.mysql_datainsert_sp_budget_recommendation import SpBudgetRecommendation
-from sync_amz_data.tasks.datainsert.SP.mysql_datainsert_spads import SpAds
-from sync_amz_data.tasks.datainsert.SP.mysql_datainsert_sp_targetsbid_recommendations import SpTargetsBidRecommendations
-from sync_amz_data.tasks.datainsert.SP.mysql_datainsert_sptarget import SpTargets
-from sync_amz_data.tasks.datainsert.SP.mysql_datainsert_sp_targetsbid_recommendations_v2 import SpTargetsBidRecommendationsV2
-from sync_amz_data.tasks.datainsert.SP.mysql_datainsert_spnegativekeyword import SpNegativeKeyword
-from sync_amz_data.tasks.datainsert.SP.mysql_datainsert_spnegativetarget import SpNegativeTarget
+import time
 
+from sync_amz_data.tasks.datainsert.SB.mysql_datainsert_sb_keywordsbid_recommendations_v3 import \
+    SbkeywordsBidRecommendations
+from sync_amz_data.tasks.datainsert.SB.mysql_datainsert_sbads import SbAds
 from sync_amz_data.tasks.datainsert.SB.mysql_datainsert_sbcampaign import SbCampaign
 from sync_amz_data.tasks.datainsert.SB.mysql_datainsert_sbgroup import SbGroup
 from sync_amz_data.tasks.datainsert.SB.mysql_datainsert_sbkeyword_v3 import SbKeyword
-from sync_amz_data.tasks.datainsert.SB.mysql_datainsert_sbads import SbAds
-from sync_amz_data.tasks.datainsert.SB.mysql_datainsert_sb_keywordsbid_recommendations_v3 import SbkeywordsBidRecommendations
-from sync_amz_data.tasks.datainsert.SB.mysql_datainsert_sbtarget_v3 import SbTargets
-from sync_amz_data.tasks.datainsert.SB.mysql_datainsert_sbtargetbid_recommendations_v3 import SbtargetsBidRecommendations
 from sync_amz_data.tasks.datainsert.SB.mysql_datainsert_sbnegativekeyword import SbNegtiveKeyword
+from sync_amz_data.tasks.datainsert.SB.mysql_datainsert_sbtarget_v3 import SbTargets
+from sync_amz_data.tasks.datainsert.SB.mysql_datainsert_sbtargetbid_recommendations_v3 import \
+    SbtargetsBidRecommendations
 from sync_amz_data.tasks.datainsert.SB.mysql_datainsert_sbthemetargeting_v3 import SbThemeTargeting
-
-from sync_amz_data.tasks.datainsert.mysql_datainsert_assets import Assets
+from sync_amz_data.tasks.datainsert.SP.mysql_datainsert_sp_budget_recommendation import SpBudgetRecommendation
+from sync_amz_data.tasks.datainsert.SP.mysql_datainsert_sp_targetsbid_recommendations import SpTargetsBidRecommendations
+from sync_amz_data.tasks.datainsert.SP.mysql_datainsert_sp_targetsbid_recommendations_v2 import \
+    SpTargetsBidRecommendationsV2
+from sync_amz_data.tasks.datainsert.SP.mysql_datainsert_spads import SpAds
+from sync_amz_data.tasks.datainsert.SP.mysql_datainsert_spcampaign import SpCampaign
+from sync_amz_data.tasks.datainsert.SP.mysql_datainsert_spgroup import SpGroup
+from sync_amz_data.tasks.datainsert.SP.mysql_datainsert_spkeyword import SpKeyword
+from sync_amz_data.tasks.datainsert.SP.mysql_datainsert_spnegativekeyword import SpNegativeKeyword
+from sync_amz_data.tasks.datainsert.SP.mysql_datainsert_spnegativetarget import SpNegativeTarget
+from sync_amz_data.tasks.datainsert.SP.mysql_datainsert_sptarget import SpTargets
 from sync_amz_data.tasks.datainsert.categories_updata import Categories
+from sync_amz_data.tasks.datainsert.mysql_datainsert_assets import Assets
+from sync_amz_data.tasks.datainsert.mysql_datainsert_portfolios import Portfolios
 from sync_amz_data.tasks.datainsert.pl_update import ProductLineDetail
-import time
 
 
 def protime(start_time):
@@ -71,6 +73,12 @@ spto = spt.updata_create()
 print("SpTargets", spto)
 protime(start_time)
 
+start_time = time.time()
+spg = SpGroup(profile_id="3006125408623189")
+spgo = spg.updata_create()
+print("SpGroup", spgo)
+protime(start_time)
+
 start_time = time.time()
 spa = SpAds(profile_id="3006125408623189")
 spao = spa.updata_create()
@@ -107,7 +115,6 @@ spnko = spnk.updata_create()
 print('SpNegativeKeyword', spnko)
 protime(start_time)
 
-
 start_time = time.time()
 sbc = SbCampaign(profile_id="3006125408623189")
 sbco = sbc.updata_create()
@@ -132,6 +139,12 @@ sbao = sba.updata_create()
 print("SbAds", sbao)
 protime(start_time)
 
+start_time = time.time()
+sbg = SbGroup(profile_id="3006125408623189")
+sbgo = sbg.updata_create()
+print("SbGroup", sbgo)
+protime(start_time)
+
 start_time = time.time()
 sbkbr = SbkeywordsBidRecommendations(profile_id="3006125408623189")
 sbkbro = sbkbr.updata_create()