Advertisement
Not a member of Pastebin yet? Sign up — it unlocks many cool features!
import json

import pandas as pd

from facebook_business.adobjects.offlineconversiondataset import OfflineConversionDataSet
from facebook_business.api import FacebookAdsApi
class FacebookService:
    """Thin wrapper around the Facebook Marketing API for uploading offline
    conversion events to an Offline Conversion Data Set."""

    def __init__(self):
        # NOTE(review): credentials and the dataset id are hard-coded
        # placeholders — load them from config/env before production use.
        self.api = FacebookAdsApi.init(app_id='your_app_id', app_secret='your_app_secret',
                                       access_token='your_access_token')
        self.offline_dataset = OfflineConversionDataSet('offline_set_id')

    def upload_offline_conversion(self, csv_filename):
        """Read conversion events from a semicolon-delimited CSV and push them
        to the offline dataset in batches of at most 2000 events.

        Expected CSV columns: 'order_id', 'value', 'event_time', 'event_name',
        'email', 'phone', 'fn', 'ln', 'currency'.

        :param csv_filename: path to the CSV file to upload.
        """
        df = pd.read_csv(csv_filename, sep=";", dtype=object)
        df['value'] = pd.to_numeric(df['value'])
        # The API expects event times as UNIX timestamps (seconds), as strings;
        # .astype(int) on datetime64 yields nanoseconds, hence the 10**9 divide.
        df['event_time'] = (pd.to_datetime(df['event_time']).astype(int) / 10 ** 9).astype(int).astype(str)
        # Fold the PII columns into the JSON 'match_keys' structure the API
        # expects: email/phone are wrapped in lists, fn/ln stay scalar, and
        # null cells are omitted entirely.
        df['match_keys'] = df.apply(
            lambda row: json.dumps(
                {k: [row[k]] if k in ['email', 'phone'] else row[k]
                 for k in ['email', 'phone', 'fn', 'ln'] if pd.notnull(row[k])}
            ),
            axis=1,
        )
        # The single PII columns are now redundant — they live in match_keys.
        df = df.drop(columns=['email', 'phone', 'fn', 'ln'])
        data = df.to_dict(orient="records")
        batch_limit = 2000  # Maximum number of events permitted in a single call
        # BUG FIX: range() accepts its step only positionally; the original
        # `range(0, len(data), step=batch_limit)` raised TypeError on any upload.
        for i in range(0, len(data), batch_limit):
            params = {
                # Must be a string, unique across your uploads, so you can
                # identify each upload later.
                'upload_tag': 'purchases_upload',
                'data': data[i:i + batch_limit],
            }
            self.offline_dataset.create_event(params=params)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement