Thanks for your response.
From this code, can you please help me understand how to send multiple
requests with the same job resource name?
def add_users_to_customer_match_user_list(client, customer_id,
                                          user_list_resource_name,
                                          customer_data, skip_polling,
                                          filename):
    """Uses Customer Match to create and add users to a new user list.

    Args:
        client: The Google Ads client.
        customer_id: The customer ID for which to add the user list.
        user_list_resource_name: The resource name of the user list to which
            to add users.
        customer_data: Processed customer data to be uploaded.
        skip_polling: A bool dictating whether to poll the API for completion.
        filename: Passed through to check_job_status when skip_polling is
            True.
    """
    offline_user_data_job_service_client = client.get_service(
        'OfflineUserDataJobService')

    # Build a CUSTOMER_MATCH_USER_LIST job targeting the given user list.
    offline_user_data_job = client.get_type('OfflineUserDataJob')
    offline_user_data_job.type_ = client.get_type(
        'OfflineUserDataJobTypeEnum'
    ).OfflineUserDataJobType.CUSTOMER_MATCH_USER_LIST
    offline_user_data_job.customer_match_user_list_metadata.user_list = (
        user_list_resource_name)

    # Issues a request to create an offline user data job.
    create_offline_user_data_job_response = (
        offline_user_data_job_service_client.create_offline_user_data_job(
            customer_id=customer_id, job=offline_user_data_job))
    offline_user_data_job_resource_name = (
        create_offline_user_data_job_response.resource_name)
    print('Created an offline user data job with resource name: '
          f'"{offline_user_data_job_resource_name}".')

    # Issues a request to add the operations to the offline user data job.
    # NOTE(review): to upload more identifiers than fit in one request,
    # send several AddOfflineUserDataJobOperationsRequest calls reusing the
    # same offline_user_data_job_resource_name before running the job.
    request = client.get_type('AddOfflineUserDataJobOperationsRequest')
    request.resource_name = offline_user_data_job_resource_name
    request.operations.extend(build_offline_user_data_job_operations(
        client, customer_data))
    request.enable_partial_failure = True
    response = (
        offline_user_data_job_service_client
        .add_offline_user_data_job_operations(request=request))

    # Prints the status message if any partial failure error is returned.
    # Note: the details of each partial failure error are not printed here.
    # Refer to the error_handling/handle_partial_failure.py example to learn
    # more.
    # Extracts the partial failure from the response status.
    partial_failure = getattr(response, 'partial_failure_error', None)
    if getattr(partial_failure, 'code', None) != 0:
        error_details = getattr(partial_failure, 'details', [])
        for error_detail in error_details:
            failure_message = client.get_type('GoogleAdsFailure')
            # Retrieve the class definition of the GoogleAdsFailure instance
            # in order to use the "deserialize" class method to parse the
            # error_detail string into a protobuf message object.
            failure_object = type(failure_message).deserialize(
                error_detail.value)
            for error in failure_object.errors:
                print('A partial failure at index '
                      f'{error.location.field_path_elements[0].index} '
                      'occurred.\n'
                      f'Error message: {error.message}\n'
                      f'Error code: {error.error_code}')
    print('The operations are added to the offline user data job.')

    # Issues a request to run the offline user data job for executing all
    # added operations.
    operation_response = (
        offline_user_data_job_service_client.run_offline_user_data_job(
            resource_name=offline_user_data_job_resource_name))

    if skip_polling:
        check_job_status(
            client,
            customer_id,
            offline_user_data_job_resource_name,
            user_list_resource_name,
            filename
        )
    else:
        # Wait until the operation has finished.
        print('Request to execute the added operations started.')
        print('Waiting until operation completes...')
        operation_response.result()
        print_customer_match_user_list_info(client, customer_id,
                                            user_list_resource_name)
def build_offline_user_data_job_operations(client, customer_data):
    """Builds the schema of user data as defined in the API.

    Args:
        client: The Google Ads client.
        customer_data: Processed customer data to be uploaded, keyed by data
            type ('emails', 'phones', 'mobile_ids', 'user_ids', 'addresses').

    Returns:
        A list containing the operations.
    """
    customer_data_operations = []
    for data_type in customer_data:
        for item in customer_data[data_type]:
            # One operation per record, each carrying a single identifier.
            user_data_operation = client.get_type(
                'OfflineUserDataJobOperation')
            user_data = user_data_operation.create
            user_identifier = client.get_type('UserIdentifier')
            if data_type == 'emails':
                user_identifier.hashed_email = item['hashed_email']
            elif data_type == 'phones':
                user_identifier.hashed_phone_number = item[
                    'hashed_phone_number']
            elif data_type == 'mobile_ids':
                user_identifier.mobile_id = item['mobile_id']
            elif data_type == 'user_ids':
                user_identifier.third_party_user_id = item[
                    'third_party_user_id']
            elif data_type == 'addresses':
                # Address identifiers require all four hashed/plain fields.
                address_info = user_identifier.address_info
                address_info.hashed_first_name = item['hashed_first_name']
                address_info.hashed_last_name = item['hashed_last_name']
                address_info.country_code = item['country_code']
                address_info.postal_code = item['postal_code']
            user_data.user_identifiers.append(user_identifier)
            customer_data_operations.append(user_data_operation)
    return customer_data_operations
On Thursday, January 5, 2023 at 7:14:22 PM UTC+5:30 adsapi wrote:
> Hi Chethan,
>
> Thank you for bringing this to our attention.
>
> With regard to your questions, please refer to my answers below:
>
> *1.From this code can you confirm me whether I'm uploading as different
> user identifiers or what?*
> Upon checking your code below, I can confirm that you are uploading
> different user identifiers.
>
>
>
>
>
>
>
>
>
>
>
>
>
>
> *if data_type == 'emails': user_identifier.hashed_email =
> item['hashed_email'] elif data_type == 'phones':
> user_identifier.hashed_phone_number = item['hashed_phone_number'] elif
> data_type == 'mobile_ids': user_identifier.mobile_id = item['mobile_id']
> elif data_type == 'user_ids': user_identifier.third_party_user_id =
> item['third_party_user_id'] elif data_type == 'addresses':
> user_identifier.address_info.hashed_first_name = item[ 'hashed_first_name']
> user_identifier.address_info.hashed_last_name = item['hashed_last_name']
> user_identifier.address_info.country_code = item['country_code']
> user_identifier.address_info.postal_code = item['postal_code']*
>
> *2.If I'm uploading as different user identifier then is it possible to
> upload user identifiers having > 1 lakh count?*
> If you are using the OfflineUserDataJobService.AddOfflineUserDataJobOperations
> method, you can add up to 1,000,000 identifiers to a single job.
>
> *3.If possible how can I upload more than 1 lakh user identifiers?*
> If you need to upload more than 100,000 identifiers for a job, you may
> need to send multiple requests with the same job resource_name. For more
> information, you may check this document
> <https://developers.google.com/google-ads/api/docs/remarketing/audience-types/customer-match#customer_match_considerations>
> .
>
> Best regards,
> [image: Google Logo]
> Jinky
> Google Ads API Team
>
>
> ref:_00D1U1174p._5004Q2hZOVp:ref
>
--
--
=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~
Also find us on our blog:
https://googleadsdeveloper.blogspot.com/
=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~=~
You received this message because you are subscribed to the Google
Groups "AdWords API and Google Ads API Forum" group.
To post to this group, send email to [email protected]
To unsubscribe from this group, send email to
[email protected]
For more options, visit this group at
http://groups.google.com/group/adwords-api?hl=en
---
You received this message because you are subscribed to the Google Groups
"Google Ads API and AdWords API Forum" group.
To unsubscribe from this group and stop receiving emails from it, send an email
to [email protected].
To view this discussion on the web visit
https://groups.google.com/d/msgid/adwords-api/8f040f8d-0d2f-4ed6-b521-ae2269e29ad1n%40googlegroups.com.