docstring | function | __index_level_0__
---|---|---|
Use this method to add contacts to your Telegram address book.
Args:
contacts (List of :obj:`InputPhoneContact <pyrogram.InputPhoneContact>`):
The contact list to be added
Returns:
On success, the added contacts are returned.
Raises:
:class:`RPCError <pyrogram.RPCError>` in case of a Telegram RPC error. | def add_contacts(
self,
contacts: List["pyrogram.InputPhoneContact"]
):
imported_contacts = self.send(
functions.contacts.ImportContacts(
contacts=contacts
)
)
return imported_contacts | 150,658 |
Use this method to turn off the Two-Step Verification security feature (Cloud Password) on your account.
Args:
password (``str``):
Your current password.
Returns:
True on success.
Raises:
:class:`RPCError <pyrogram.RPCError>` in case of a Telegram RPC error.
``ValueError`` in case there is no cloud password to remove. | def remove_cloud_password(
self,
password: str
) -> bool:
r = self.send(functions.account.GetPassword())
if not r.has_password:
raise ValueError("There is no cloud password to remove")
self.send(
functions.account.UpdatePasswordSettings(
password=compute_check(r, password),
new_settings=types.account.PasswordInputSettings(
new_algo=types.PasswordKdfAlgoUnknown(),
new_password_hash=b"",
hint=""
)
)
)
return True | 150,679 |
Use this method to delete contacts from your Telegram address book.
Args:
ids (List of ``int``):
A list of unique identifiers for the target users.
Can be an ID (int), a username (string) or phone number (string).
Returns:
True on success.
Raises:
:class:`RPCError <pyrogram.RPCError>` in case of a Telegram RPC error. | def delete_contacts(
self,
ids: List[int]
):
contacts = []
for i in ids:
try:
input_user = self.resolve_peer(i)
except PeerIdInvalid:
continue
else:
if isinstance(input_user, types.InputPeerUser):
contacts.append(input_user)
return self.send(
functions.contacts.DeleteContacts(
id=contacts
)
) | 150,684 |
Filter messages that match a given RegEx pattern.
Args:
pattern (``str``):
The RegEx pattern as string, it will be applied to the text of a message. When a pattern matches,
all the `Match Objects <https://docs.python.org/3/library/re.html#match-objects>`_
are stored in the *matches* field of the :class:`Message <pyrogram.Message>` itself.
flags (``int``, *optional*):
RegEx flags. | def regex(pattern, flags: int = 0):
def f(_, m):
m.matches = [i for i in _.p.finditer(m.text or m.caption or "")]
return bool(m.matches)
return create("Regex", f, p=re.compile(pattern, flags)) | 150,744 |
Initializer.
Args:
name: The name, e.g. 'Pattern'.
type_var: The type parameter, e.g. AnyStr, or the
specific type, e.g. str.
impl_type: The implementation type.
type_checker: Function that takes an impl_type instance
and returns a value that should be a type_var instance. | def __init__(self, name, type_var, impl_type, type_checker):
assert isinstance(name, str), repr(name)
assert isinstance(impl_type, type), repr(impl_type)
assert not isinstance(impl_type, TypingMeta), repr(impl_type)
assert isinstance(type_var, (type, _TypingBase)), repr(type_var)
self.name = name
self.type_var = type_var
self.impl_type = impl_type
self.type_checker = type_checker | 150,785 |
Use this method to join a group chat or channel.
Args:
chat_id (``str``):
Unique identifier for the target chat in form of a *t.me/joinchat/* link or username of the target
channel/supergroup (in the format @username).
Returns:
On success, a :obj:`Chat <pyrogram.Chat>` object is returned.
Raises:
:class:`RPCError <pyrogram.RPCError>` in case of a Telegram RPC error. | def join_chat(
self,
chat_id: str
):
match = self.INVITE_LINK_RE.match(chat_id)
if match:
chat = self.send(
functions.messages.ImportChatInvite(
hash=match.group(1)
)
)
if isinstance(chat.chats[0], types.Chat):
return pyrogram.Chat._parse_chat_chat(self, chat.chats[0])
elif isinstance(chat.chats[0], types.Channel):
return pyrogram.Chat._parse_channel_chat(self, chat.chats[0])
else:
resolved_peer = self.send(
functions.contacts.ResolveUsername(
username=chat_id.lower().strip("@")
)
)
channel = types.InputPeerChannel(
channel_id=resolved_peer.chats[0].id,
access_hash=resolved_peer.chats[0].access_hash
)
chat = self.send(
functions.channels.JoinChannel(
channel=channel
)
)
return pyrogram.Chat._parse_channel_chat(self, chat.chats[0]) | 150,863 |
Use this method to delete your own profile photos.
Args:
id (``str`` | ``list``):
A single :obj:`Photo <pyrogram.Photo>` id as string or multiple ids as list of strings for deleting
more than one photo at once.
Returns:
True on success.
Raises:
:class:`RPCError <pyrogram.RPCError>` in case of a Telegram RPC error. | def delete_user_profile_photos(
self,
id: Union[str, List[str]]
) -> bool:
id = id if isinstance(id, list) else [id]
input_photos = []
for i in id:
s = unpack("<qq", b64decode(i + "=" * (-len(i) % 4), "-_"))
input_photos.append(
types.InputPhoto(
id=s[0],
access_hash=s[1],
file_reference=b""
)
)
return bool(self.send(
functions.photos.DeletePhotos(
id=input_photos
)
)) | 150,876 |
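The id strings above are URL-safe base64 without padding, packing the photo id and access hash as two little-endian 64-bit integers. A small round-trip sketch with made-up numbers (not real Telegram ids) shows the re-padding and unpacking step:
from base64 import urlsafe_b64decode, urlsafe_b64encode
from struct import pack, unpack

# Hypothetical id/access_hash pair, packed the same way the method expects.
encoded = urlsafe_b64encode(pack("<qq", 123456789, -987654321)).decode().rstrip("=")
restored = unpack("<qq", urlsafe_b64decode(encoded + "=" * (-len(encoded) % 4)))
print(restored)  # (123456789, -987654321)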
Use this method to leave a group chat or channel.
Args:
chat_id (``int`` | ``str``):
Unique identifier for the target chat or username of the target channel/supergroup
(in the format @username).
delete (``bool``, *optional*):
Deletes the group chat dialog after leaving (for simple group chats, not supergroups).
Raises:
:class:`RPCError <pyrogram.RPCError>` in case of a Telegram RPC error. | def leave_chat(
self,
chat_id: Union[int, str],
delete: bool = False
):
peer = self.resolve_peer(chat_id)
if isinstance(peer, types.InputPeerChannel):
return self.send(
functions.channels.LeaveChannel(
channel=self.resolve_peer(chat_id)
)
)
elif isinstance(peer, types.InputPeerChat):
r = self.send(
functions.messages.DeleteChatUser(
chat_id=peer.chat_id,
user_id=types.InputPeerSelf()
)
)
if delete:
self.send(
functions.messages.DeleteHistory(
peer=peer,
max_id=0
)
)
return r | 150,884 |
Apply warping_function to some input data
Parameters:
-----------
X : array_like, shape = (n_samples, n_features)
test_data: bool, optional
Defaults to False; should be set to True when transforming test data
Returns
-------
X_warped : array_like, shape = (n_samples, n_features)
The warped input data
Math
----
f(x) = 1 - (1 - x^a)^b | def f(self, X, test_data=False):
X_warped = X.copy()
if test_data:
X_normalized = (X - self.Xmin) / (self.Xmax - self.Xmin)
else:
X_normalized = self.X_normalized
for i_seq, i_fea in enumerate(self.warping_indices):
a, b = self.params[i_seq][0], self.params[i_seq][1]
X_warped[:, i_fea] = 1 - np.power(1 - np.power(X_normalized[:, i_fea], a), b)
return X_warped | 151,687 |
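As a worked illustration of f(x) = 1 - (1 - x^a)^b on already-normalized inputs, a standalone NumPy sketch with arbitrary example parameters (not the fitted values the class would use):
import numpy as np

a, b = 2.0, 0.5                       # arbitrary example warping parameters
x = np.array([0.0, 0.25, 0.5, 1.0])   # inputs already scaled to [0, 1]
x_warped = 1 - np.power(1 - np.power(x, a), b)
print(x_warped)  # approximately [0.     0.0318 0.134  1.    ]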
Get the log likelihood of a combined set of clusters, fitting the offsets
arguments:
inputs -- the 'X's in a list, one item per cluster
data -- the 'Y's in a list, one item per cluster
clust -- list of clusters to use
returns a tuple:
log likelihood and the offset | def get_log_likelihood_offset(inputs,data,clust):
#if we've only got one cluster, the model has an error, so we want to just
#use normal GPRegression.
if len(clust)==1:
return get_log_likelihood(inputs,data,clust)
S = data[0].shape[0] #number of time series
X = np.zeros([0,2]) #notice the extra column, this is for the cluster index
Y = np.zeros([0,S])
#for each person in the cluster, add their inputs and data to the new
#dataset. Note we add an index identifying which person is which data point.
#This is for the offset model to use, to allow it to know which data points
#to shift.
for i,p in enumerate(clust):
idx = i*np.ones([inputs[p].shape[0],1])
X = np.vstack([X,np.hstack([inputs[p],idx])])
Y = np.vstack([Y,data[p].T])
m = GPy.models.GPOffsetRegression(X,Y)
#TODO: How to select a sensible prior?
m.offset.set_prior(GPy.priors.Gaussian(0,20))
#TODO: Set a sensible start value for the length scale,
#make it long to help the offset fit.
m.optimize()
ll = m.log_likelihood()
offset = m.offset.values[0]
return ll,offset | 151,870 |
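The GPy model itself is heavyweight, but the data-stacking step above (appending a within-cluster index column so the offset model knows which rows belong to which person) can be shown with NumPy alone, using made-up inputs:
import numpy as np

# Hypothetical cluster of two people, each with a 1-D input column.
inputs = [np.array([[0.0], [1.0], [2.0]]), np.array([[0.5], [1.5]])]
clust = [0, 1]

X = np.zeros([0, 2])  # the extra column holds the within-cluster index
for i, p in enumerate(clust):
    idx = i * np.ones([inputs[p].shape[0], 1])
    X = np.vstack([X, np.hstack([inputs[p], idx])])
print(X)  # five rows; last column is 0 for person 0's rows, 1 for person 1's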
Creates ExpandedTextAds that use ad customizations for specified AdGroups.
Args:
client: an AdWordsClient instance.
adgroup_ids: a list containing the AdGroup ids to add ExpandedTextAds to.
feed_name: the name of the feed used to apply customizations.
Raises:
GoogleAdsError: if no ExpandedTextAds were added. | def CreateAdsWithCustomizations(client, adgroup_ids, feed_name):
# Get the AdGroupAdService
adgroup_ad_service = client.GetService('AdGroupAdService', 'v201809')
expanded_text_ad = {
'xsi_type': 'ExpandedTextAd',
'headlinePart1': 'Luxury Cruise to {=%s.Name}' % feed_name,
'headlinePart2': 'Only {=%s.Price}' % feed_name,
'description': 'Offer ends in {=countdown(%s.Date)}!' % feed_name,
'finalUrls': ['http://www.example.com'],
}
# We add the same ad to both ad groups. When they serve, they will show
# different values, since they match different feed items.
operations = [{
'operator': 'ADD',
'operand': {
'adGroupId': adgroup,
'ad': expanded_text_ad
}
} for adgroup in adgroup_ids]
response = adgroup_ad_service.mutate(operations)
if response and 'value' in response:
for ad in response['value']:
print ('Created an ad with ID "%s", type "%s", and status "%s".'
% (ad['ad']['id'], ad['ad']['Ad.Type'], ad['status']))
else:
raise errors.GoogleAdsError('No ads were added.') | 152,643 |
Creates a new AdCustomizerFeed.
Args:
client: an AdWordsClient instance.
feed_name: the name for the new AdCustomizerFeed.
Returns:
The new AdCustomizerFeed. | def CreateCustomizerFeed(client, feed_name):
# Get the AdCustomizerFeedService
ad_customizer_feed_service = client.GetService('AdCustomizerFeedService',
'v201809')
customizer_feed = {
'feedName': feed_name,
'feedAttributes': [
{'type': 'STRING', 'name': 'Name'},
{'type': 'STRING', 'name': 'Price'},
{'type': 'DATE_TIME', 'name': 'Date'}
]
}
feed_service_operation = {
'operator': 'ADD',
'operand': customizer_feed
}
response = ad_customizer_feed_service.mutate([feed_service_operation])
if response and 'value' in response:
feed = response['value'][0]
feed_data = {
'feedId': feed['feedId'],
'nameId': feed['feedAttributes'][0]['id'],
'priceId': feed['feedAttributes'][1]['id'],
'dateId': feed['feedAttributes'][2]['id']
}
print ('Feed with name "%s" and ID %s was added with:\n'
'\tName attribute ID %s and price attribute ID %s and date attribute '
'ID %s') % (feed['feedName'], feed['feedId'], feed_data['nameId'],
feed_data['priceId'], feed_data['dateId'])
return feed
else:
raise errors.GoogleAdsError('No feeds were added') | 152,644 |
Restricts the feed item to an ad group.
Args:
client: an AdWordsClient instance.
feed_item: The feed item.
adgroup_id: The ad group ID. | def RestrictFeedItemToAdGroup(client, feed_item, adgroup_id):
# Get the FeedItemTargetService
feed_item_target_service = client.GetService(
'FeedItemTargetService', 'v201809')
# Optional: Restrict the first feed item to only serve with ads for the
# specified ad group ID.
ad_group_target = {
'xsi_type': 'FeedItemAdGroupTarget',
'feedId': feed_item['feedId'],
'feedItemId': feed_item['feedItemId'],
'adGroupId': adgroup_id
}
operation = {'operator': 'ADD', 'operand': ad_group_target}
response = feed_item_target_service.mutate([operation])
new_ad_group_target = response['value'][0]
print('Feed item target for feed ID %s and feed item ID %s was created to '
'restrict serving to ad group ID %s' %
(new_ad_group_target['feedId'],
new_ad_group_target['feedItemId'],
new_ad_group_target['adGroupId'])) | 152,645 |
Creates FeedItems for the specified AdGroups.
These FeedItems contain values to use in ad customizations for the AdGroups.
Args:
client: an AdWordsClient instance.
adgroup_ids: a list containing two AdGroup Ids.
ad_customizer_feed: the AdCustomizerFeed we're associating the FeedItems
with.
Raises:
GoogleAdsError: if no FeedItems were added. | def CreateCustomizerFeedItems(client, adgroup_ids, ad_customizer_feed):
# Get the FeedItemService
feed_item_service = client.GetService('FeedItemService', 'v201809')
now = datetime.now()
mars_date = datetime(now.year, now.month, 1, 0, 0)
venus_date = datetime(now.year, now.month, 15, 0, 0)
time_format = '%Y%m%d %H%M%S'
feed_item_operations = [
CreateFeedItemAddOperation(
'Mars', '$1234.56', mars_date.strftime(time_format),
ad_customizer_feed),
CreateFeedItemAddOperation(
'Venus', '$1450.00', venus_date.strftime(time_format),
ad_customizer_feed)
]
response = feed_item_service.mutate(feed_item_operations)
if 'value' in response:
for feed_item in response['value']:
print 'Added FeedItem with ID %d.' % feed_item['feedItemId']
else:
raise errors.GoogleAdsError('No FeedItems were added.')
for feed_item, adgroup_id in zip(response['value'], adgroup_ids):
RestrictFeedItemToAdGroup(client, feed_item, adgroup_id) | 152,646 |
Creates a FeedItemOperation.
The generated FeedItemOperation will create a FeedItem with the specified
values when sent to FeedItemService.mutate.
Args:
name: the value for the name attribute of the FeedItem.
price: the value for the price attribute of the FeedItem.
date: the value for the date attribute of the FeedItem.
ad_customizer_feed: the AdCustomizerFeed we're associating the FeedItems
with.
Returns:
A new FeedItemOperation for adding a FeedItem. | def CreateFeedItemAddOperation(name, price, date, ad_customizer_feed):
feed_item = {
'feedId': ad_customizer_feed['feedId'],
'attributeValues': [
{
'feedAttributeId': ad_customizer_feed['feedAttributes'][0]['id'],
'stringValue': name
},
{
'feedAttributeId': ad_customizer_feed['feedAttributes'][1]['id'],
'stringValue': price
},
{
'feedAttributeId': ad_customizer_feed['feedAttributes'][2]['id'],
'stringValue': date
}
]
}
operation = {
'operator': 'ADD',
'operand': feed_item
}
return operation | 152,647 |
Creates an AdWordsClient with information stored in a yaml string.
Args:
yaml_doc: The yaml string containing the cached AdWords data.
Returns:
An AdWordsClient initialized with the values cached in the string.
Raises:
A GoogleAdsValueError if the given yaml string does not contain the
information necessary to instantiate a client object - either a
required key was missing or an OAuth2 key was missing. | def LoadFromString(cls, yaml_doc):
return cls(**googleads.common.LoadFromString(
yaml_doc, cls._YAML_KEY, cls._REQUIRED_INIT_VALUES,
cls._OPTIONAL_INIT_VALUES)) | 152,659 |
Creates an AdWordsClient with information stored in a yaml file.
Args:
[optional]
path: The path string to the file containing cached AdWords data.
Returns:
An AdWordsClient initialized with the values cached in the file.
Raises:
A GoogleAdsValueError if the given yaml file does not contain the
information necessary to instantiate a client object - either a
required key was missing or an OAuth2 key was missing. | def LoadFromStorage(cls, path=None):
if path is None:
path = os.path.join(os.path.expanduser('~'), 'googleads.yaml')
return cls(**googleads.common.LoadFromStorage(
path, cls._YAML_KEY, cls._REQUIRED_INIT_VALUES,
cls._OPTIONAL_INIT_VALUES)) | 152,660 |
Returns the SOAP headers required for request authorization.
Args:
create_method: The SOAP library specific method used to instantiate SOAP
objects.
Returns:
A SOAP object containing the headers. | def GetSOAPHeaders(self, create_method):
header = create_method(self._SOAP_HEADER_CLASS % self._version)
header.clientCustomerId = self._adwords_client.client_customer_id
header.developerToken = self._adwords_client.developer_token
header.userAgent = ''.join([
self._adwords_client.user_agent,
googleads.common.GenerateLibSig(self._PRODUCT_SIG)])
header.validateOnly = self._adwords_client.validate_only
header.partialFailure = self._adwords_client.partial_failure
return header | 152,666 |
Pack the given object using AdWords-specific logic.
Args:
obj: an object to be packed for SOAP using AdWords-specific logic, if
applicable.
version: the version of the current API, e.g. 'v201809'
Returns:
The given object packed with AdWords-specific logic for SOAP, if
applicable. Otherwise, returns the given object unmodified. | def Pack(cls, obj, version):
if isinstance(obj, ServiceQuery):
return str(obj)
return obj | 152,669 |
Initializes the IncrementalUpload.
Args:
request_builder: an AbstractUploadRequestBuilder instance.
upload_url: a string url provided by the BatchJobService.
current_content_length: an integer identifying the current content length
of data uploaded to the Batch Job.
is_last: a boolean indicating whether this is the final increment.
Raises:
GoogleAdsValueError: if the content length is lower than 0. | def __init__(self, request_builder, upload_url, current_content_length=0,
is_last=False):
self._request_builder = request_builder
if current_content_length < 0:
raise googleads.errors.GoogleAdsValueError(
'Current content length %s is < 0.' % current_content_length)
self._current_content_length = current_content_length
self._is_last = is_last
self._url_opener = urllib2.build_opener(
*self._request_builder.client.proxy_config.GetHandlers())
if self._request_builder.client.custom_http_headers:
self._url_opener.addheaders.extend(
self._request_builder.client.custom_http_headers.items())
self._upload_url = self._InitializeURL(upload_url, current_content_length) | 152,671 |
Ensures that the URL used to upload operations is properly initialized.
Args:
upload_url: a string url.
current_content_length: an integer identifying the current content length
of data uploaded to the Batch Job.
Returns:
An initialized string URL, or the provided string URL if the URL has
already been initialized. | def _InitializeURL(self, upload_url, current_content_length):
# If initialization is not necessary, return the provided upload_url.
if current_content_length != 0:
return upload_url
headers = {
'Content-Type': 'application/xml',
'Content-Length': 0,
'x-goog-resumable': 'start'
}
# Send an HTTP POST request to the given upload_url
req = urllib2.Request(upload_url, data={}, headers=headers)
resp = self._url_opener.open(req)
return resp.headers['location'] | 152,672 |
Serialize the IncrementalUploadHelper and store in file-like object.
Args:
output: a file-like object where the status of the IncrementalUploadHelper
will be written.
Raises:
GoogleAdsError: If a YAMLError occurs while writing to the file. | def Dump(self, output):
data = {
'current_content_length': self._current_content_length,
'is_last': self._is_last,
'server': self._request_builder.GetServer(),
'upload_url': self._upload_url,
'version': self._request_builder.GetVersion()
}
try:
yaml.dump(data, output)
except yaml.YAMLError as e:
raise googleads.errors.GoogleAdsError(
'Error dumping IncrementalUploadHelper to file: %s' % str(e)) | 152,673 |
Uploads operations to the given uploadUrl in incremental steps.
Note: Each list of operations is expected to contain operations of the
same type, similar to how one would normally send operations in an
AdWords API Service request.
Args:
operations: one or more lists of operations as would be sent to the
AdWords API for the associated service.
is_last: a boolean indicating whether this is the final increment to be
added to the batch job. | def UploadOperations(self, operations, is_last=False):
if self._is_last:
raise googleads.errors.AdWordsBatchJobServiceInvalidOperationError(
'Can\'t add new operations to a completed incremental upload.')
# Build the request
req = self._request_builder.BuildUploadRequest(
self._upload_url, operations,
current_content_length=self._current_content_length, is_last=is_last)
# Make the request, ignoring the urllib2.HTTPError raised due to HTTP status
# code 308 (for resumable uploads).
try:
_batch_job_logger.debug('Outgoing request: %s %s %s',
req.get_full_url(), req.headers, req.data)
self._url_opener.open(req)
if _batch_job_logger.isEnabledFor(logging.INFO):
_batch_job_logger.info('Request summary: %s',
self._ExtractRequestSummaryFields(req))
except urllib2.HTTPError as e:
if e.code != 308:
if _batch_job_logger.isEnabledFor(logging.WARNING):
_batch_job_logger.warning(
'Request summary: %s',
self._ExtractRequestSummaryFields(req, error=e))
raise
# Update upload status.
self._current_content_length += len(req.data)
self._is_last = is_last | 152,674 |
Extract fields used in the summary logs.
Args:
request: a urllib2.Request instance configured to make the request.
[optional]
error: a urllib2.HttpError instance used to retrieve error details.
Returns:
A dict containing the fields to be output in the summary logs. | def _ExtractRequestSummaryFields(self, request, error=None):
headers = request.headers
summary_fields = {
'server': request.get_full_url(),
'contentRange': headers['Content-range'],
'contentLength': headers['Content-length']
}
if error:
summary_fields['isError'] = True
summary_fields['errorMessage'] = error.reason
else:
summary_fields['isError'] = False
return summary_fields | 152,675 |
Creates a WHERE builder using a provided field.
Args:
field: the field to be added as an argument in the WHERE clause.
Returns:
The created WHERE builder. | def Where(self, field):
where_builder = _WhereBuilder(self, field)
self.where_builders.append(where_builder)
return where_builder | 152,677 |
Creates the WHERE builder with specified query builder and field.
This class should be instantiated through _QueryBuilder.Where. Don't call
this constructor directly.
Args:
query_builder: The query builder that this WHERE builder links to.
field: The field to be used in the WHERE condition.
Returns:
The WHERE builder. | def __init__(self, query_builder, field):
self._field = field
self._query_builder = query_builder
self._awql = None | 152,678 |
Sets the type of the WHERE clause as "equal to".
Args:
value: The value to be used in the WHERE condition.
Returns:
The query builder that this WHERE builder links to. | def EqualTo(self, value):
self._awql = self._CreateSingleValueCondition(value, '=')
return self._query_builder | 152,679 |
Sets the type of the WHERE clause as "not equal to".
Args:
value: The value to be used in the WHERE condition.
Returns:
The query builder that this WHERE builder links to. | def NotEqualTo(self, value):
self._awql = self._CreateSingleValueCondition(value, '!=')
return self._query_builder | 152,680 |
Sets the type of the WHERE clause as "greater than".
Args:
value: The value to be used in the WHERE condition.
Returns:
The query builder that this WHERE builder links to. | def GreaterThan(self, value):
self._awql = self._CreateSingleValueCondition(value, '>')
return self._query_builder | 152,681 |
Sets the type of the WHERE clause as "greater than or equal to".
Args:
value: The value to be used in the WHERE condition.
Returns:
The query builder that this WHERE builder links to. | def GreaterThanOrEqualTo(self, value):
self._awql = self._CreateSingleValueCondition(value, '>=')
return self._query_builder | 152,682 |
Sets the type of the WHERE clause as "less than".
Args:
value: The value to be used in the WHERE condition.
Returns:
The query builder that this WHERE builder links to. | def LessThan(self, value):
self._awql = self._CreateSingleValueCondition(value, '<')
return self._query_builder | 152,683 |
Sets the type of the WHERE clause as "less than or equal to.
Args:
value: The value to be used in the WHERE condition.
Returns:
The query builder that this WHERE builder links to. | def LessThanOrEqualTo(self, value):
self._awql = self._CreateSingleValueCondition(value, '<=')
return self._query_builder | 152,684 |
Sets the type of the WHERE clause as "starts with".
Args:
value: The value to be used in the WHERE condition.
Returns:
The query builder that this WHERE builder links to. | def StartsWith(self, value):
self._awql = self._CreateSingleValueCondition(value, 'STARTS_WITH')
return self._query_builder | 152,685 |
Sets the type of the WHERE clause as "starts with ignore case".
Args:
value: The value to be used in the WHERE condition.
Returns:
The query builder that this WHERE builder links to. | def StartsWithIgnoreCase(self, value):
self._awql = self._CreateSingleValueCondition(value,
'STARTS_WITH_IGNORE_CASE')
return self._query_builder | 152,686 |
Sets the type of the WHERE clause as "contains".
Args:
value: The value to be used in the WHERE condition.
Returns:
The query builder that this WHERE builder links to. | def Contains(self, value):
self._awql = self._CreateSingleValueCondition(value, 'CONTAINS')
return self._query_builder | 152,687 |
Sets the type of the WHERE clause as "contains ignore case".
Args:
value: The value to be used in the WHERE condition.
Returns:
The query builder that this WHERE builder links to. | def ContainsIgnoreCase(self, value):
self._awql = self._CreateSingleValueCondition(value, 'CONTAINS_IGNORE_CASE')
return self._query_builder | 152,688 |
Sets the type of the WHERE clause as "does not contain".
Args:
value: The value to be used in the WHERE condition.
Returns:
The query builder that this WHERE builder links to. | def DoesNotContain(self, value):
self._awql = self._CreateSingleValueCondition(value, 'DOES_NOT_CONTAIN')
return self._query_builder | 152,689 |
Sets the type of the WHERE clause as "doesn not contain ignore case".
Args:
value: The value to be used in the WHERE condition.
Returns:
The query builder that this WHERE builder links to. | def DoesNotContainIgnoreCase(self, value):
self._awql = self._CreateSingleValueCondition(
value, 'DOES_NOT_CONTAIN_IGNORE_CASE')
return self._query_builder | 152,690 |
Sets the type of the WHERE clause as "in".
Args:
*values: The values to be used in the WHERE condition.
Returns:
The query builder that this WHERE builder links to. | def In(self, *values):
self._awql = self._CreateMultipleValuesCondition(values, 'IN')
return self._query_builder | 152,691 |
Sets the type of the WHERE clause as "in".
Args:
*values: The values to be used in the WHERE condition.
Returns:
The query builder that this WHERE builder links to. | def NotIn(self, *values):
self._awql = self._CreateMultipleValuesCondition(values, 'NOT_IN')
return self._query_builder | 152,692 |
Sets the type of the WHERE clause as "contains any".
Args:
*values: The values to be used in the WHERE condition.
Returns:
The query builder that this WHERE builder links to. | def ContainsAny(self, *values):
self._awql = self._CreateMultipleValuesCondition(values, 'CONTAINS_ANY')
return self._query_builder | 152,693 |
Sets the type of the WHERE clause as "contains none".
Args:
*values: The values to be used in the WHERE condition.
Returns:
The query builder that this WHERE builder links to. | def ContainsNone(self, *values):
self._awql = self._CreateMultipleValuesCondition(values, 'CONTAINS_NONE')
return self._query_builder | 152,694 |
Sets the type of the WHERE clause as "contains all".
Args:
*values: The values to be used in the WHERE condition.
Returns:
The query builder that this WHERE builder links to. | def ContainsAll(self, *values):
self._awql = self._CreateMultipleValuesCondition(values, 'CONTAINS_ALL')
return self._query_builder | 152,695 |
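Taken together, these comparison methods form a fluent interface: each one records an AWQL condition and returns the parent query builder so calls can be chained. A hedged usage sketch, assuming googleads.adwords.ServiceQueryBuilder is the entry point that exposes Select/Where/Limit/Build around the builders shown above; the field names are illustrative only:
from googleads import adwords

query = (adwords.ServiceQueryBuilder()
         .Select('Id', 'Name', 'Status')
         .Where('Status').EqualTo('ENABLED')
         .Where('Name').StartsWith('Interplanetary')
         .Limit(0, 100)
         .Build())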
A page generator for this service query and the provided service.
This generates a page as a result from using the provided service's query()
method until there are no more results to fetch.
Args:
service: The service object for making a query using this service query.
Yields:
A resulting page from querying the provided service. | def Pager(self, service):
has_page = True
while has_page:
page = service.query(self)
yield page
has_page = self.HasNext(page)
if has_page:
self.NextPage() | 152,701 |
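A hedged sketch of how the generator above is typically consumed; campaign_service and query are assumed to come from client.GetService('CampaignService', ...) and the query builder shown earlier:
for page in query.Pager(campaign_service):
    if 'entries' in page:
        for campaign in page['entries']:
            print('Campaign ID %s with name "%s".' % (campaign['id'], campaign['name']))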
Update the credentials associated with application user.
Args:
client_id: str Client Id retrieved from the developer's console.
client_secret: str Client Secret retrieved from the developer's console.
refresh_token: str Refresh token generated with the above client id/secret.
adwords_manager_cid: str Customer Id for the AdWords manager account.
developer_token: str Developer Token for the AdWords account. | def UpdateUserCredentials(client_id, client_secret, refresh_token,
adwords_manager_cid, developer_token):
app_user = AppUser.query(AppUser.user == users.get_current_user()).fetch()[0]
app_user.client_id = client_id
app_user.client_secret = client_secret
app_user.refresh_token = refresh_token
app_user.adwords_manager_cid = adwords_manager_cid
app_user.developer_token = developer_token
app_user.put() | 152,705 |
Creates a default partition.
Args:
client: an AdWordsClient instance.
ad_group_id: an integer ID for an ad group. | def CreateDefaultPartition(client, ad_group_id):
ad_group_criterion_service = client.GetService('AdGroupCriterionService',
version='v201809')
operations = [{
'operator': 'ADD',
'operand': {
'xsi_type': 'BiddableAdGroupCriterion',
'adGroupId': ad_group_id,
# Make sure that caseValue and parentCriterionId are left unspecified.
# This makes this partition as generic as possible to use as a
# fallback when others don't match.
'criterion': {
'xsi_type': 'ProductPartition',
'partitionType': 'UNIT'
},
'biddingStrategyConfiguration': {
'bids': [{
'xsi_type': 'CpcBid',
'bid': {
'microAmount': 500000
}
}]
}
}
}]
ad_group_criterion = ad_group_criterion_service.mutate(operations)['value'][0]
print ('Ad group criterion with ID "%d" in ad group with ID "%d" was added.'
% (ad_group_criterion['criterion']['id'],
ad_group_criterion['adGroupId'])) | 152,710 |
Sanitizes a field value from a Value object into a CSV-suitable format.
Args:
pql_value: dict a dictionary containing the data for a single field of an
entity.
Returns:
str a CSV writer friendly value formatted by Value.Type. | def ConvertValueForCsv(pql_value):
if 'value' in pql_value:
field = pql_value['value']
elif 'values' in pql_value:
field = pql_value['values']
else:
field = None
if field:
if isinstance(field, list):
return ','.join(['"%s"' % str(ConvertValueForCsv(single_field))
for single_field in field])
else:
class_type = ad_manager.AdManagerClassType(pql_value)
if class_type == 'TextValue':
return field.replace('"', '""').encode('UTF8')
elif class_type == 'NumberValue':
return float(field) if '.' in field else int(field)
elif class_type == 'DateTimeValue':
return ConvertDateTimeToOffset(field)
elif class_type == 'DateValue':
return date(int(field['date']['year']),
int(field['date']['month']),
int(field['date']['day'])).isoformat()
else:
return field
else:
return '-' | 152,713 |
Converts the PQL formatted response for a dateTime object.
Output conforms to ISO 8601 format, e.g. 'YYYY-MM-DDTHH:MM:SSz.'
Args:
date_time_value: dict The date time value from the PQL response.
Returns:
str: A string representation of the date time value uniform to
ReportService. | def ConvertDateTimeToOffset(date_time_value):
date_time_obj = datetime(int(date_time_value['date']['year']),
int(date_time_value['date']['month']),
int(date_time_value['date']['day']),
int(date_time_value['hour']),
int(date_time_value['minute']),
int(date_time_value['second']))
date_time_str = pytz.timezone(
date_time_value['timeZoneId']).localize(date_time_obj).isoformat()
if date_time_str[-5:] == '00:00':
return date_time_str[:-6] + 'Z'
else:
return date_time_str | 152,714 |
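A usage example with a made-up PQL value (requires pytz; the dict layout simply mirrors the fields the function reads):
sample = {
    'date': {'year': '2019', 'month': '6', 'day': '1'},
    'hour': '12', 'minute': '30', 'second': '0',
    'timeZoneId': 'America/New_York',
}
print(ConvertDateTimeToOffset(sample))  # 2019-06-01T12:30:00-04:00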
Creates a user identifier from the specified type and value.
Args:
identifier_type: a str specifying the type of user identifier.
value: a str value of the identifier; to be hashed using SHA-256 if needed.
Returns:
A dict specifying a user identifier, with a value hashed using SHA-256 if
needed. | def _CreateUserIdentifier(identifier_type=None, value=None):
if identifier_type in _HASHED_IDENTIFIER_TYPES:
# If the user identifier type is a hashed type, normalize and hash the
# value.
value = hashlib.sha256(value.strip().lower()).hexdigest()
user_identifier = {
'userIdentifierType': identifier_type,
'value': value
}
return user_identifier | 152,720 |
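For the hashed branch, a standalone sketch of the normalize-then-SHA-256 step; note that on Python 3 the value must be encoded to bytes before hashing, and HASHED_EMAIL is just an example identifier type:
import hashlib

raw = '  Alex.Doe@Example.com '
normalized = raw.strip().lower()
hashed = hashlib.sha256(normalized.encode('utf-8')).hexdigest()
user_identifier = {'userIdentifierType': 'HASHED_EMAIL', 'value': hashed}
print(user_identifier)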
Retrieve the index of a given field in the api_error's fieldPathElements.
Args:
api_error: a dict containing a partialFailureError returned from the AdWords
API.
field: a str field for which this determines the index in the api_error's
fieldPathElements.
Returns:
An int index of the field path element, or None if the specified field can't
be found in the api_error. | def _GetFieldPathElementIndex(api_error, field):
field_path_elements = api_error['fieldPathElements']
if field_path_elements:
found_index = [field_path_element['index']
for field_path_element in field_path_elements
if field_path_element['field'] == field]
if found_index:
return found_index[0]
return None | 152,721 |
Overrides the ingress function for response logging.
Args:
envelope: An Element with the SOAP request data.
http_headers: A dict of the current http headers.
operation: The SoapOperation instance.
Returns:
A tuple of the envelope and headers. | def ingress(self, envelope, http_headers, operation):
if self._logger.isEnabledFor(logging.DEBUG):
self._logger.debug(_RESPONSE_XML_LOG_LINE,
etree.tostring(envelope, pretty_print=True))
if self._logger.isEnabledFor(logging.WARN):
warn_data = {}
header = envelope.find(_HEADER_XPATH)
fault = envelope.find(_FAULT_XPATH)
if fault is not None:
warn_data['faultMessage'] = fault.find('faultstring').text
if header is not None:
header_data = {
re.sub(_REMOVE_NS_REGEXP, '', child.tag): child.text
for child in header[0]}
warn_data.update(header_data)
if 'serviceName' not in warn_data:
warn_data['serviceName'] = operation.binding.wsdl.services.keys()[0]
if 'methodName' not in warn_data:
warn_data['methodName'] = operation.name
self._logger.warn('Error summary: %s', warn_data)
return envelope, http_headers | 152,756 |
Overrides the egress function for request logging.
Args:
envelope: An Element with the SOAP request data.
http_headers: A dict of the current http headers.
operation: The SoapOperation instance.
binding_options: An options dict for the SOAP binding.
Returns:
A tuple of the envelope and headers. | def egress(self, envelope, http_headers, operation, binding_options):
if self._logger.isEnabledFor(logging.INFO):
service_name = operation.binding.wsdl.services.keys()[0]
self._logger.info(_REQUEST_LOG_LINE, service_name, operation.name,
binding_options['address'])
if self._logger.isEnabledFor(logging.DEBUG):
http_headers_safe = http_headers.copy()
if self._AUTHORIZATION_HEADER in http_headers_safe:
http_headers_safe[self._AUTHORIZATION_HEADER] = self._REDACTED
request_string = etree.tostring(envelope, pretty_print=True)
safe_request = self._DEVELOPER_TOKEN_SUB.sub(
self._REDACTED, request_string.decode('utf-8'))
self._logger.debug(
_REQUEST_XML_LOG_LINE, http_headers_safe, safe_request)
return envelope, http_headers | 152,757 |
Restrict a feed item to a geo target location.
Args:
client: An AdWordsClient instance.
feed_item: A FeedItem.
location_id: The Id of the location to restrict to. | def RestrictFeedItemToGeoTarget(client, feed_item, location_id):
# Retrieve the FeedItemTargetService
feed_item_target_service = client.GetService(
'FeedItemTargetService', version='v201809')
# Optional: Restrict the first feed item to only serve with ads for the
# specified geo target.
criterion_target = {
'xsi_type': 'FeedItemCriterionTarget',
'feedId': feed_item['feedId'],
'feedItemId': feed_item['feedItemId'],
# These IDs can be found in the documentation or retrieved with the
# LocationCriterionService.
'criterion': {
'xsi_type': 'Location',
'id': location_id
}
}
operation = {'operator': 'ADD', 'operand': criterion_target}
response = feed_item_target_service.mutate([operation])
new_location_target = response['value'][0]
print('Feed item target for feed ID %d and feed item ID %d was created to '
'restrict serving to location ID %d.' %
(new_location_target['feedId'],
new_location_target['feedItemId'],
new_location_target['criterion']['id'])) | 152,764 |
Adds a new Smart Shopping campaign.
Args:
client: an AdWordsClient instance.
budget_id: the str ID of the budget to be associated with the Shopping
campaign.
merchant_id: the str ID of the merchant account to be associated with the
Shopping campaign.
Returns:
A campaign ID. | def CreateSmartCampaign(client, budget_id, merchant_id):
campaign_service = client.GetService('CampaignService', version='v201809')
# Create campaign with required and optional settings.
campaign = {
'name': 'Shopping campaign #%s' % uuid.uuid4(),
# The advertisingChannelType is what makes this a Shopping campaign.
'advertisingChannelType': 'SHOPPING',
# Sets the advertisingChannelSubType to SHOPPING_GOAL_OPTIMIZED_ADS to
# make this a Smart Shopping campaign.
'advertisingChannelSubType': 'SHOPPING_GOAL_OPTIMIZED_ADS',
# Recommendation: Set the campaign to PAUSED when creating it to stop the
# ads from immediately serving. Set to ENABLED once you've added targeting
# and the ads are ready to serve.
'status': 'PAUSED',
# Set portfolio budget (required).
'budget': {'budgetId': budget_id},
# Set a bidding strategy. Only MAXIMIZE_CONVERSION_VALUE is supported.
'biddingStrategyConfiguration': {
'biddingStrategyType': 'MAXIMIZE_CONVERSION_VALUE'
},
'settings': [{
# All Shopping campaigns need a ShoppingSetting.
'xsi_type': 'ShoppingSetting',
'salesCountry': 'US',
'merchantId': merchant_id
}]
}
campaign_operations = [{
'operator': 'ADD',
'operand': campaign
}]
result = campaign_service.mutate(campaign_operations)['value'][0]
print ('Smart Shopping campaign with name "%s" and ID "%s" was added.'
% (result['name'], result['id']))
return result['id'] | 152,774 |
Adds a new Smart Shopping ad group.
Args:
client: an AdWordsClient instance.
campaign_id: the str ID of a Smart Shopping campaign.
Returns:
An ad group ID. | def CreateSmartShoppingAdGroup(client, campaign_id):
ad_group_service = client.GetService('AdGroupService', version='v201809')
# Create the ad group.
ad_group = {
'campaignId': campaign_id,
'name': 'Smart Shopping ad group #%s' % uuid.uuid4(),
# Set the ad group type to SHOPPING_GOAL_OPTIMIZED_ADS.
'adGroupType': 'SHOPPING_GOAL_OPTIMIZED_ADS'
}
adgroup_operations = {
'operator': 'ADD',
'operand': ad_group
}
# Make the mutate request to add the AdGroup to the Smart Shopping campaign.
ad_group = ad_group_service.mutate(adgroup_operations)['value'][0]
ad_group_id = ad_group['id']
print ('AdGroup with name "%s" and ID "%s" was added.'
% (ad_group['name'], ad_group_id))
return ad_group_id | 152,775 |
Adds a new Smart Shopping ad.
Args:
client: an AdWordsClient instance.
ad_group_id: an integer ID for an ad group. | def CreateSmartShoppingAd(client, ad_group_id):
ad_group_ad_service = client.GetService('AdGroupAdService', version='v201809')
# Create an AdGroup Ad.
adgroup_ad = {
'adGroupId': ad_group_id,
# Create a Smart Shopping ad (Goal-optimized Shopping ad).
'ad': {
'xsi_type': 'GoalOptimizedShoppingAd'
}
}
ad_operation = {
'operator': 'ADD',
'operand': adgroup_ad
}
# Make the mutate request to add the Smart Shopping ad to the AdGroup.
ad_result = ad_group_ad_service.mutate([ad_operation])
for adgroup_ad in ad_result['value']:
print 'Smart Shopping ad with ID "%s" was added.' % adgroup_ad['ad']['id'] | 152,776 |
A simple convenience utility for adding months to a given start date.
For each month to add, the date is advanced by the number of days in the current month.
Args:
start_date: date The date months are being added to.
months: int The number of months to add.
Returns:
A date equal to the start date incremented by the given number of months. | def AddMonths(start_date, months):
current_date = start_date
i = 0
while i < months:
month_days = calendar.monthrange(current_date.year, current_date.month)[1]
current_date += timedelta(days=month_days)
i += 1
return current_date | 152,783 |
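A quick worked example (assuming the calendar and timedelta imports used by AddMonths are in scope); note the day-count increment means the result is not always the same day of the month:
from datetime import date

# Jan 2019 has 31 days -> 2019-03-03; Mar has 31 days -> 2019-04-03.
print(AddMonths(date(2019, 1, 31), 2))  # 2019-04-03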
Displays the mean average CPC, position, clicks, and total cost for an estimate.
Args:
message: str message to display for the given estimate.
min_estimate: sudsobject containing a minimum estimate from the
TrafficEstimatorService response.
max_estimate: sudsobject containing a maximum estimate from the
TrafficEstimatorService response. | def DisplayEstimate(message, min_estimate, max_estimate):
# Find the mean of the min and max values.
mean_avg_cpc = (_CalculateMean(min_estimate['averageCpc']['microAmount'],
max_estimate['averageCpc']['microAmount'])
if 'averageCpc' in min_estimate
and min_estimate['averageCpc'] else None)
mean_avg_pos = (_CalculateMean(min_estimate['averagePosition'],
max_estimate['averagePosition'])
if 'averagePosition' in min_estimate
and min_estimate['averagePosition'] else None)
mean_clicks = _CalculateMean(min_estimate['clicksPerDay'],
max_estimate['clicksPerDay'])
mean_total_cost = _CalculateMean(min_estimate['totalCost']['microAmount'],
max_estimate['totalCost']['microAmount'])
print message
print ' Estimated average CPC: %s' % _FormatMean(mean_avg_cpc)
print ' Estimated ad position: %s' % _FormatMean(mean_avg_pos)
print ' Estimated daily clicks: %s' % _FormatMean(mean_clicks)
print ' Estimated daily cost: %s' % _FormatMean(mean_total_cost) | 152,788 |
Format a SOAP DateTime object for printing.
Args:
value: The DateTime object to format.
Returns:
A string representing the value. | def FormatSOAPDateTime(value):
value_date = value['date']
return '%s-%s-%s %s:%s:%s (%s)' % (
value_date['year'], value_date['month'], value_date['day'],
value['hour'], value['minute'], value['second'], value['timeZoneId']) | 152,796 |
Calculate forecast percentage stats.
Args:
matched: The number of matched impressions.
available: The number of available impressions.
possible: The optional number of possible impressions.
Returns:
The percentage of impressions that are available and possible. | def CalculateForecastStats(matched, available, possible=None):
if matched > 0:
available_percent = (float(available) / matched) * 100.
else:
available_percent = 0
if possible is not None:
if matched > 0:
possible_percent = (possible/float(matched)) * 100.
else:
possible_percent = 0
else:
possible_percent = None
return available_percent, possible_percent | 152,797 |
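A short worked example of the percentage arithmetic above, with made-up impression counts:
matched, available, possible = 2000, 1500, 1800
available_pct, possible_pct = CalculateForecastStats(matched, available, possible)
print('%.1f%% available, %.1f%% possible' % (available_pct, possible_pct))
# 75.0% available, 90.0% possible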
Creates a shopping campaign with the given budget and merchant IDs.
Args:
client: an AdWordsClient instance.
budget_id: the str ID of the budget to be associated with the shopping
campaign.
merchant_id: the str ID of the merchant account to be associated with the
shopping campaign.
Returns:
The created Shopping Campaign as a sudsobject. | def CreateShoppingCampaign(client, budget_id, merchant_id):
campaign_service = client.GetService('CampaignService', 'v201809')
campaign = {
'name': 'Shopping campaign #%s' % uuid.uuid4(),
# The advertisingChannelType is what makes this a shopping campaign
'advertisingChannelType': 'SHOPPING',
# Recommendation: Set the campaign to PAUSED when creating it to stop the
# ads from immediately serving. Set to ENABLED once you've added targeting
# and the ads are ready to serve.
'status': 'PAUSED',
# Set portfolio budget (required)
'budget': {
'budgetId': budget_id
},
'biddingStrategyConfiguration': {
'biddingStrategyType': 'MANUAL_CPC'
},
'settings': [
# All shopping campaigns need a ShoppingSetting
{
'xsi_type': 'ShoppingSetting',
'salesCountry': 'US',
'campaignPriority': '0',
'merchantId': merchant_id,
# Set to "True" to enable Local Inventory Ads in your campaign.
'enableLocal': True
}
]
}
campaign_operations = [{
'operator': 'ADD',
'operand': campaign
}]
campaign = campaign_service.mutate(campaign_operations)['value'][0]
print ('Campaign with name "%s" and ID "%s" was added.'
% (campaign['name'], campaign['id']))
return campaign | 152,803 |
Creates an AdGroup for the given shopping campaign ID.
Args:
client: an AdWordsClient instance.
campaign_id: the str ID of a shopping campaign.
Returns:
The created AdGroup as a sudsobject. | def CreateAdGroup(client, campaign_id):
ad_group_service = client.GetService('AdGroupService', 'v201809')
adgroup = {
# Required: Set the ad group type to SHOPPING_SHOWCASE_ADS
'adGroupType': 'SHOPPING_SHOWCASE_ADS',
'campaignId': campaign_id,
'name': 'AdGroup #%s' % uuid.uuid4(),
# REQUIRED: Set the ad group's bidding strategy configuration.
'biddingStrategyConfiguration': {
# Showcase ads require either ManualCpc or EnhancedCpc.
'biddingStrategyType': 'MANUAL_CPC',
# Optional: Set the bids
'bids': [{
'xsi_type': 'CpcBid',
'bid': {
'microAmount': 100000
}
}]
}
}
adgroup_operations = {
'operator': 'ADD',
'operand': adgroup
}
# Make the mutate request to add the AdGroup to the Shopping Campaign
adgroup = ad_group_service.mutate(adgroup_operations)['value'][0]
print ('AdGroup with name "%s" and ID "%s" was added.'
% (adgroup['name'], adgroup['id']))
return adgroup | 152,804 |
Creates a Showcase ad for the given AdGroup with the given images.
Args:
client: an AdWordsClient instance.
adgroup: a dict or suds object defining an AdGroup for a Shopping Campaign.
expanded_image_filepath: a str filepath to a .jpg file that will be used as
the Showcase Ad's expandedImage.
collapsed_image_filepath: a str filepath to a .jpg file that will be used as
the Showcase Ad's collapsedImage.
Returns:
The created Showcase Ad as a sudsobject. | def CreateShowcaseAd(client, adgroup, expanded_image_filepath,
collapsed_image_filepath):
ad_group_ad_service = client.GetService('AdGroupAdService', 'v201809')
showcase_ad = {
'adGroupId': adgroup['id'],
'ad': {
'xsi_type': 'ShowcaseAd',
'Ad.Type': 'ShowcaseAd',
# Required: set the ad's name, final URLs, and display URL.
'name': 'Showcase ad #%s' % uuid.uuid4(),
'finalUrls': ['http://example.com/showcase'],
'displayUrl': 'example.com',
# Required: Set the ad's expanded image.
'expandedImage': {
'mediaId': UploadImage(client, expanded_image_filepath)['mediaId']
},
# Optional: Set the collapsed image.
'collapsedImage': {
'mediaId':
UploadImage(client, collapsed_image_filepath)['mediaId']
}
}
}
ad_operation = {
'operator': 'ADD',
'operand': showcase_ad
}
# Make the mutate request to add the ProductAd to the AdGroup
showcase_ad = ad_group_ad_service.mutate([ad_operation])['value'][0]
print 'ShowcaseAd with ID "%s" was added.' % showcase_ad['ad']['id']
return showcase_ad | 152,805 |
Uploads a .jpg image with the given filepath via the AdWords MediaService.
Args:
client: an AdWordsClient instance.
filepath: a str filepath to the .jpg file to be uploaded.
Returns:
The created Image as a sudsobject. | def UploadImage(client, filepath):
media_service = client.GetService('MediaService', 'v201809')
with open(filepath, 'rb') as image_handle:
image_data = image_handle.read().decode('utf-8')
image = [{
'xsi_type': 'Image',
'data': image_data,
'type': 'IMAGE'
}]
image = media_service.upload(image)[0]
return image | 152,806 |
Creates a ProductPartition tree for the given AdGroup ID.
Args:
client: an AdWordsClient instance.
adgroup_id: a str AdGroup ID.
Returns:
The ProductPartition tree as a sudsobject. | def CreateProductPartition(client, adgroup_id):
ad_group_criterion_service = client.GetService('AdGroupCriterionService',
'v201809')
helper = ProductPartitionHelper(adgroup_id)
root = helper.CreateSubdivision()
new_product_canonical_condition = {
'xsi_type': 'ProductCanonicalCondition',
'condition': 'NEW'
}
used_product_canonical_condition = {
'xsi_type': 'ProductCanonicalCondition',
'condition': 'USED'
}
other_product_canonical_condition = {
'xsi_type': 'ProductCanonicalCondition',
}
helper.CreateUnit(root, new_product_canonical_condition)
helper.CreateUnit(root, used_product_canonical_condition)
helper.CreateUnit(root, other_product_canonical_condition)
result = ad_group_criterion_service.mutate(helper.operations)
return result['value'] | 152,807 |
Initializer.
Args:
adgroup_id: The ID of the AdGroup that we wish to attach the partition
tree to. | def __init__(self, adgroup_id):
# The next temporary criterion ID to be used.
# When creating our tree we need to specify the parent-child relationships
# between nodes. However, until a criterion has been created on the server
# we do not have a criterion ID with which to refer to it.
# Instead we can specify temporary IDs that are specific to a single mutate
# request. Once the criteria have been created they are assigned an ID as
# normal and the temporary ID will no longer refer to it.
# A valid temporary ID is any negative integer.
self.next_id = -1
# The set of mutate operations needed to create the current tree.
self.operations = []
self.adgroup_id = adgroup_id | 152,808 |
Creates a subdivision node.
Args:
parent: The node that should be this node's parent.
value: The value being partitioned on.
Returns:
A new subdivision node. | def CreateSubdivision(self, parent=None, value=None):
division = {
'xsi_type': 'ProductPartition',
'partitionType': 'SUBDIVISION',
'id': str(self.next_id)
}
# The root has neither a parent nor a value.
if parent is not None:
division['parentCriterionId'] = parent['id']
division['caseValue'] = value
adgroup_criterion = {
'xsi_type': 'BiddableAdGroupCriterion',
'adGroupId': self.adgroup_id,
'criterion': division
}
self.CreateAddOperation(adgroup_criterion)
self.next_id -= 1
return division | 152,809 |
Creates a unit node.
Args:
parent: The node that should be this node's parent.
value: The value being partitioned on.
bid_amount: The amount to bid for matching products, in micros.
Returns:
A new unit node. | def CreateUnit(self, parent=None, value=None, bid_amount=None):
unit = {
'xsi_type': 'ProductPartition',
'partitionType': 'UNIT'
}
# The root node has neither a parent nor a value.
if parent is not None:
unit['parentCriterionId'] = parent['id']
unit['caseValue'] = value
if bid_amount is not None and bid_amount > 0:
# Note: Showcase ads require that the campaign has a ManualCpc
# BiddingStrategyConfiguration.
bidding_strategy_configuration = {
'bids': [{
'xsi_type': 'CpcBid',
'bid': {
'xsi_type': 'Money',
'microAmount': str(bid_amount)
}
}]
}
adgroup_criterion = {
'xsi_type': 'BiddableAdGroupCriterion',
'biddingStrategyConfiguration': bidding_strategy_configuration
}
else:
adgroup_criterion = {
'xsi_type': 'NegativeAdGroupCriterion'
}
adgroup_criterion['adGroupId'] = self.adgroup_id
adgroup_criterion['criterion'] = unit
self.CreateAddOperation(adgroup_criterion)
return unit | 152,810 |
Creates the budget.
Args:
client: an AdWordsClient instance.
Returns:
a suds.sudsobject.Object representation of the created budget. | def _CreateBudget(client):
budget_service = client.GetService('BudgetService', version='v201809')
# Create the campaign budget
operation = {
'operand': {
'name': 'Interplanetary Cruise Budget #%d' % uuid.uuid4(),
'deliveryMethod': 'STANDARD',
'amount': {
'microAmount': 500000
}
},
'operator': 'ADD'
}
budget = budget_service.mutate([operation])['value'][0]
print 'Budget with ID "%d" and name "%s" was created.' % (
budget['budgetId'], budget['name'])
return budget | 152,845 |
Creates the campaign.
Args:
client: an AdWordsClient instance.
budget: a suds.sudsobject.Object representation of a created budget.
Returns:
An integer campaign ID. | def _CreateCampaign(client, budget):
campaign_service = client.GetService('CampaignService')
operations = [{
'operator': 'ADD',
'operand': {
'name': 'Interplanetary Cruise #%d' % uuid.uuid4(),
# Recommendation: Set the campaign to PAUSED when creating it to
# prevent the ads from immediately serving. Set to ENABLED once you've
# added targeting and the ads are ready to serve.
'status': 'PAUSED',
'advertisingChannelType': 'SEARCH',
'biddingStrategyConfiguration': {
'biddingStrategyType': 'MANUAL_CPC',
},
'budget': budget,
# Required: Set the campaign's Dynamic Search Ad settings.
'settings': [{
'xsi_type': 'DynamicSearchAdsSetting',
# Required: Set the domain name and language.
'domainName': 'example.com',
'languageCode': 'en'
}],
# Optional: Set the start date.
'startDate': (datetime.datetime.now() +
datetime.timedelta(1)).strftime('%Y%m%d'),
# Optional: Set the end date.
'endDate': (datetime.datetime.now() +
datetime.timedelta(365)).strftime('%Y%m%d'),
}
}]
campaign = campaign_service.mutate(operations)['value'][0]
campaign_id = campaign['id']
print 'Campaign with ID "%d" and name "%s" was added.' % (
campaign_id, campaign['name'])
return campaign_id | 152,846 |
Creates an ad group.
Args:
client: an AdWordsClient instance.
campaign_id: an integer campaign ID.
Returns:
An integer ad group ID. | def _CreateAdGroup(client, campaign_id):
ad_group_service = client.GetService('AdGroupService')
operations = [{
'operator': 'ADD',
'operand': {
'campaignId': campaign_id,
'adGroupType': 'SEARCH_DYNAMIC_ADS',
'name': 'Earth to Mars Cruises #%d' % uuid.uuid4(),
'status': 'PAUSED',
'biddingStrategyConfiguration': {
'bids': [{
'xsi_type': 'CpcBid',
'bid': {
'microAmount': '3000000'
},
}]
}
}
}]
ad_group = ad_group_service.mutate(operations)['value'][0]
ad_group_id = ad_group['id']
print 'Ad group with ID "%d" and name "%s" was created.' % (
ad_group_id, ad_group['name'])
return ad_group_id | 152,847 |
Creates the expanded Dynamic Search Ad.
Args:
client: an AdwordsClient instance.
ad_group_id: an integer ID of the ad group in which the DSA is added. | def _CreateExpandedDSA(client, ad_group_id):
# Get the AdGroupAdService.
ad_group_ad_service = client.GetService('AdGroupAdService')
# Create the operation
operations = [{
'operator': 'ADD',
'operand': {
'xsi_type': 'AdGroupAd',
'adGroupId': ad_group_id,
# Create the expanded dynamic search ad. This ad will have its
# headline and final URL auto-generated at serving time according to
# domain name specific information provided by DynamicSearchAdsSetting
# at the campaign level.
'ad': {
'xsi_type': 'ExpandedDynamicSearchAd',
# Set the ad description.
'description': 'Buy your tickets now!',
'description2': 'Discount ends soon'
},
# Optional: Set the status.
'status': 'PAUSED',
}
}]
# Create the ad.
ad = ad_group_ad_service.mutate(operations)['value'][0]['ad']
# Display the results.
print ('Expanded dynamic search ad with ID "%d", description "%s", and '
'description 2 "%s" was added'
% (ad['id'], ad['description'], ad['description2'])) | 152,848 |
Adds a web page criterion to target Dynamic Search Ads.
Args:
client: an AdWordsClient instance.
ad_group_id: an integer ID of the ad group the criteria is being added to. | def _AddWebPageCriteria(client, ad_group_id):
ad_group_criterion_service = client.GetService('AdGroupCriterionService',
version='v201809')
operations = [{
'operator': 'ADD',
# Create biddable ad group criterion.
'operand': {
'xsi_type': 'BiddableAdGroupCriterion',
'adGroupId': ad_group_id,
# Create a webpage criterion for special offers for children.
'criterion': {
'xsi_type': 'Webpage',
'parameter': {
'criterionName': 'Special offers for children.',
'conditions': [
{
'operand': 'URL',
'argument': '/marscruise/children'
},
{
'operand': 'PAGE_TITLE',
'argument': 'Special Offer'
}
]
}
},
'userStatus': 'PAUSED',
# Optional: set a custom bid.
'biddingStrategyConfiguration': {
'bids': [{
'xsi_type': 'CpcBid',
'bid': {
'microAmount': 10000000L
}
}]
}
}
}]
criterion = ad_group_criterion_service.mutate(operations)['value'][0]
print 'Webpage criterion with ID "%d" was added to ad group ID "%d".' % (
criterion['criterion']['id'], criterion['adGroupId']) | 152,849 |
Uploads the image from the specified url.
Args:
client: An AdWordsClient instance.
url: The image URL.
Returns:
The ID of the uploaded image. | def UploadImageAsset(client, url):
# Initialize appropriate service.
asset_service = client.GetService('AssetService', version='v201809')
# Download the image.
image_request = requests.get(url)
# Create the image asset.
image_asset = {
'xsi_type': 'ImageAsset',
'imageData': image_request.content,
# This field is optional, and if provided should be unique.
# 'assetName': 'Image asset ' + str(uuid.uuid4()),
}
# Create the operation.
operation = {
'operator': 'ADD',
'operand': image_asset
}
# Create the asset and return the ID.
result = asset_service.mutate([operation])
return result['value'][0]['assetId'] | 152,854 |
Generates a library signature suitable for a user agent field.
Args:
short_name: The short, product-specific string name for the library.
Returns:
A library signature string to append to user-supplied user-agent value. | def GenerateLibSig(short_name):
with _UTILITY_LOCK:
utilities_used = ', '.join([utility for utility
in sorted(_utility_registry)])
_utility_registry.Clear()
if utilities_used:
return ' (%s, %s, %s, %s)' % (short_name, _COMMON_LIB_SIG, _PYTHON_VERSION,
utilities_used)
else:
return ' (%s, %s, %s)' % (short_name, _COMMON_LIB_SIG, _PYTHON_VERSION) | 152,859 |
Generates a GoogleOAuth2Client subclass using the given product_data.
Args:
product_yaml_key: a string key identifying the product being configured.
product_data: a dict containing the configurations for a given product.
proxy_config: a ProxyConfig instance.
Returns:
An instantiated GoogleOAuth2Client subclass.
Raises:
A GoogleAdsValueError if the OAuth2 configuration for the given product is
misconfigured. | def _ExtractOAuth2Client(product_yaml_key, product_data, proxy_config):
oauth2_kwargs = {
'proxy_config': proxy_config
}
if all(config in product_data for config in _OAUTH2_INSTALLED_APP_KEYS):
oauth2_args = [
product_data['client_id'], product_data['client_secret'],
product_data['refresh_token']
]
oauth2_client = googleads.oauth2.GoogleRefreshTokenClient
for key in _OAUTH2_INSTALLED_APP_KEYS:
del product_data[key]
elif all(config in product_data for config in _OAUTH2_SERVICE_ACCT_KEYS):
oauth2_args = [
product_data['path_to_private_key_file'],
googleads.oauth2.GetAPIScope(product_yaml_key),
]
oauth2_kwargs.update({
'sub': product_data.get('delegated_account')
})
oauth2_client = googleads.oauth2.GoogleServiceAccountClient
for key in _OAUTH2_SERVICE_ACCT_KEYS:
del product_data[key]
for optional_key in _OAUTH2_SERVICE_ACCT_KEYS_OPTIONAL:
if optional_key in product_data:
del product_data[optional_key]
else:
raise googleads.errors.GoogleAdsValueError(
'Your yaml file is incorrectly configured for OAuth2. You need to '
'specify credentials for either the installed application flow (%s) '
'or service account flow (%s).' %
(_OAUTH2_INSTALLED_APP_KEYS, _OAUTH2_SERVICE_ACCT_KEYS))
return oauth2_client(*oauth2_args, **oauth2_kwargs) | 152,862 |
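For orientation, a sketch of the installed-application configuration this function expects; every value is a placeholder and the call itself is illustrative only.

# Installed-application flow: all three OAuth2 keys must be present.
product_data = {
    'client_id': 'INSERT_CLIENT_ID_HERE',
    'client_secret': 'INSERT_CLIENT_SECRET_HERE',
    'refresh_token': 'INSERT_REFRESH_TOKEN_HERE',
    'developer_token': 'INSERT_DEVELOPER_TOKEN_HERE',  # Not an OAuth2 key; left in place.
}
# Passing None for proxy_config is assumed to be acceptable for a direct connection.
oauth2_client = _ExtractOAuth2Client('adwords', product_data, None)
# The three OAuth2 keys are popped from product_data as a side effect, so only
# 'developer_token' remains afterwards.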
Recurses over a nested structure to look for changes in Suds objects.
Args:
obj: A parameter for a SOAP request field which is to be inspected and
will be packed for Suds if an xsi_type is specified, otherwise will be
left unaltered.
factory: The suds.client.Factory object which can create instances of the
classes generated from the WSDL.
parent: The parent object that contains the obj parameter to be inspected. | def _RecurseOverObject(obj, factory, parent=None):
if _IsSudsIterable(obj):
# Since in-place modification of the Suds object is taking place, the
# iterator should be done over a frozen copy of the unpacked fields.
copy_of_obj = tuple(obj)
for item in copy_of_obj:
if _IsSudsIterable(item):
if 'xsi_type' in item:
if isinstance(obj, tuple):
parent[obj[0]] = _PackForSuds(obj[1], factory)
else:
obj.remove(item)
obj.append(_PackForSuds(item, factory))
_RecurseOverObject(item, factory, obj) | 152,865 |
Extract logging fields from the request's suds.sax.element.Element.
Args:
document: A suds.sax.element.Element instance containing the API request.
Returns:
A dict mapping logging field names to their corresponding value. | def _ExtractRequestSummaryFields(document):
headers = document.childAtPath('Header/RequestHeader')
body = document.childAtPath('Body')
summary_fields = {
'methodName': body.getChildren()[0].name
}
# Extract AdWords-specific fields if they exist.
# Note: check for None explicitly; a plain truthiness test on the returned element evaluates False here.
client_customer_id = headers.getChild('clientCustomerId')
if client_customer_id is not None:
summary_fields['clientCustomerId'] = client_customer_id.text
# Extract Ad Manager-specific fields if they exist.
# Note: check for None explicitly; a plain truthiness test on the returned element evaluates False here.
network_code = headers.getChild('networkCode')
if network_code is not None:
summary_fields['networkCode'] = network_code.text
return summary_fields | 152,867 |
Extract logging fields from the response's suds.sax.document.Document.
Args:
document: A suds.sax.document.Document instance containing the parsed
API response for a given API request.
Returns:
A dict mapping logging field names to their corresponding value. | def _ExtractResponseSummaryFields(document):
headers = document.childAtPath('Envelope/Header/ResponseHeader')
body = document.childAtPath('Envelope/Body')
summary_fields = {}
if headers is not None:
summary_fields['requestId'] = headers.getChild('requestId').text
summary_fields['responseTime'] = headers.getChild('responseTime').text
# Extract AdWords-specific summary fields if they are present.
# Note: check for None explicitly; a plain truthiness test on the returned element evaluates False here.
service_name = headers.getChild('serviceName')
if service_name is not None:
summary_fields['serviceName'] = service_name.text
method_name = headers.getChild('methodName')
if method_name is not None:
summary_fields['methodName'] = method_name.text
operations = headers.getChild('operations')
if operations is not None:
summary_fields['operations'] = operations.text
if body is not None:
# Extract fault if it exists.
fault = body.getChild('Fault')
if fault is not None:
summary_fields['isFault'] = True
# Cap length of faultstring to 16k characters for summary.
summary_fields['faultMessage'] = fault.getChild(
'faultstring').text[:16000]
else:
summary_fields['isFault'] = False
return summary_fields | 152,868 |
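Purely illustrative (all values invented): for a successful AdWords call the summary resembles the dict below, while a SOAP fault instead sets isFault to True and fills faultMessage.

summary_fields_example = {
    'requestId': '000123abc',        # Invented request ID.
    'responseTime': '362',           # Milliseconds, as reported in the header.
    'serviceName': 'CampaignService',
    'methodName': 'mutate',
    'operations': '1',
    'isFault': False,
}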
Initializes _ZeepProxyTransport.
Args:
timeout: An integer timeout in MS for connections.
proxy_config: A ProxyConfig instance representing proxy settings.
cache: A zeep.cache.Base instance representing a cache strategy to employ. | def __init__(self, timeout, proxy_config, cache):
if not cache:
cache = zeep.cache.SqliteCache()
elif cache == ZeepServiceProxy.NO_CACHE:
cache = None
super(_ZeepProxyTransport, self).__init__(
timeout=timeout, operation_timeout=timeout, cache=cache)
self.session.proxies = proxy_config.proxies | 152,873 |
Initializes SudsHTTPSTransport.
Args:
timeout: An integer for the connection timeout time.
proxy_config: A ProxyConfig instance representing proxy settings. | def __init__(self, timeout, proxy_config):
suds.transport.http.HttpTransport.__init__(self, timeout=timeout)
self.handlers = proxy_config.GetHandlers() | 152,874 |
Return an XML string representing a SOAP complex type.
Args:
type_name: The name of the type with namespace prefix if necessary.
value: A python dictionary to hydrate the type instance with.
Returns:
A string containing the SOAP XML for the type. | def GetSoapXMLForComplexType(self, type_name, value):
schema = self.suds_client.wsdl.schema
definition_type = schema.elements[(type_name, self._namespace_override)]
marshaller = suds.mx.literal.Literal(schema)
content = suds.mx.Content(
tag=type_name, value=value,
name=type_name, type=definition_type)
data = marshaller.process(content)
return data | 152,877 |
Return an XML string representing a SOAP complex type.
Args:
type_name: The name of the type with namespace prefix if necessary.
value: A python dictionary to hydrate the type instance with.
Returns:
A string containing the SOAP XML for the type. | def GetSoapXMLForComplexType(self, type_name, value):
element = self.schema.get_element(
'{%s}%s' % (self._namespace_override, type_name))
result_element = self._element_maker(element.qname.localname)
element_value = element(**value)
element.type.render(result_element, element_value)
data = lxml.etree.tostring(result_element).strip()
return data | 152,879 |
Initializes a SOAP service.
Args:
header_handler: A googleads.common.HeaderHandler instance used to set
SOAP and HTTP headers.
packer: A googleads.common.SoapPacker instance used to transform
entities.
version: The version of the current API, e.g. 'v201811'. | def __init__(self, header_handler, packer, version):
self._header_handler = header_handler
self._packer = packer
self._version = version
self._method_proxies = {} | 152,880 |
Get the raw SOAP XML for a request.
Args:
method: The method name.
*args: A list of arguments to be passed to the method.
Returns:
An element containing the raw XML that would be sent as the request. | def GetRequestXML(self, method, *args):
self.suds_client.set_options(nosend=True)
service_request = (getattr(self, method))(*args).envelope
self.suds_client.set_options(nosend=False)
return lxml.etree.fromstring(service_request) | 152,883 |
Set the headers for the underlying client.
Args:
soap_headers: A SOAP element for the SOAP headers.
http_headers: A dictionary for the http headers. | def SetHeaders(self, soap_headers, http_headers):
self.suds_client.set_options(soapheaders=soap_headers, headers=http_headers) | 152,884 |
Determine if the WSDL contains a method.
Args:
method_name: The name of the method to search for.
Returns:
True if the method is in the WSDL, otherwise False. | def _WsdlHasMethod(self, method_name):
return method_name in self.suds_client.wsdl.services[0].ports[0].methods | 152,885 |
Create a method wrapping an invocation to the SOAP service.
Args:
method_name: A string identifying the name of the SOAP method to call.
Returns:
A callable that can be used to make the desired SOAP request. | def _CreateMethod(self, method_name):
soap_service_method = getattr(self.suds_client.service, method_name)
def MakeSoapRequest(*args):
AddToUtilityRegistry('suds')
self.SetHeaders(
self._header_handler.GetSOAPHeaders(self.CreateSoapElementForType),
self._header_handler.GetHTTPHeaders())
try:
return soap_service_method(
*[_PackForSuds(arg, self.suds_client.factory,
self._packer) for arg in args])
except suds.WebFault as e:
if _logger.isEnabledFor(logging.WARNING):
_logger.warning('Response summary - %s',
_ExtractResponseSummaryFields(e.document))
_logger.debug('SOAP response:\n%s', e.document.str())
if not hasattr(e.fault, 'detail'):
exc = (googleads.errors.
GoogleAdsServerFault(e.document, message=e.fault.faultstring))
raise exc # Done this way for 2to3
# Before re-throwing the WebFault exception, an error object needs to be
# wrapped in a list for safe iteration.
fault = e.fault.detail.ApiExceptionFault
if not hasattr(fault, 'errors') or fault.errors is None:
exc = (googleads.errors.
GoogleAdsServerFault(e.document, message=e.fault.faultstring))
raise exc # Done this way for 2to3
obj = fault.errors
if not isinstance(obj, list):
fault.errors = [obj]
exc = googleads.errors.GoogleAdsServerFault(e.document, fault.errors,
message=e.fault.faultstring)
raise exc # Done this way for 2to3
return MakeSoapRequest | 152,886 |
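A hedged sketch of what this wrapping buys the caller: an ApiExceptionFault surfaces as a GoogleAdsServerFault whose errors attribute is always a list. The empty mutate below is only a plausible trigger, not a call guaranteed to fault.

import googleads.errors
from googleads import adwords

adwords_client = adwords.AdWordsClient.LoadFromStorage()
campaign_service = adwords_client.GetService('CampaignService', version='v201809')

try:
  campaign_service.mutate([])  # Assumed to be rejected by the server.
except googleads.errors.GoogleAdsServerFault as e:
  for error in e.errors:  # Safe to iterate even when only one error came back.
    print('Server reported: %s' % error)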
Overriding the egress function to set our headers.
Args:
envelope: An Element with the SOAP request data.
http_headers: A dict of the current http headers.
operation: The SoapOperation instance.
binding_options: An options dict for the SOAP binding.
Returns:
A tuple of the envelope and headers. | def egress(self, envelope, http_headers, operation, binding_options):
custom_headers = self._header_handler.GetHTTPHeaders()
http_headers.update(custom_headers)
return envelope, http_headers | 152,887 |
Get the raw SOAP XML for a request.
Args:
method: The method name.
*args: A list of arguments to be passed to the method.
Returns:
An element containing the raw XML that would be sent as the request. | def GetRequestXML(self, method, *args):
packed_args = self._PackArguments(method, args, set_type_attrs=True)
headers = self._GetZeepFormattedSOAPHeaders()
return self.zeep_client.create_message(
self.zeep_client.service, method, *packed_args, _soapheaders=headers) | 152,889 |
Determine if a method is in the WSDL.
Args:
method_name: The name of the method.
Returns:
True if the method is in the WSDL, otherwise False. | def _WsdlHasMethod(self, method_name):
try:
self._method_bindings.get(method_name)
return True
except ValueError:
return False | 152,890 |
An imperfect but decent method for determining if a string is base64.
Args:
s: A string with the data to test.
Returns:
True if s is base64, else False. | def _IsBase64(cls, s):
try:
if base64.b64encode(base64.b64decode(s)).decode('utf-8') == s:
return True
except (TypeError, binascii.Error):
pass
return False | 152,893 |
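To make the 'imperfect' caveat concrete, a small illustration; _IsBase64 is a private helper on the zeep proxy class in this module, so calling it directly is for demonstration only.

import base64

from googleads.common import ZeepServiceProxy

encoded = base64.b64encode(b'image bytes').decode('utf-8')
print(ZeepServiceProxy._IsBase64(encoded))        # True: round-trips cleanly.
print(ZeepServiceProxy._IsBase64('plain text!'))  # False: not valid base64.
print(ZeepServiceProxy._IsBase64('abcd'))         # True: a false positive.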
Recursive helper for PackArguments.
Args:
elem: The element type we are creating.
data: The data to instantiate it with.
set_type_attrs: A boolean indicating whether or not attributes that end
in .Type should be set. This is only necessary for batch job service.
Returns:
An instance of type 'elem'. | def _PackArgumentsHelper(self, elem, data, set_type_attrs):
if self._packer:
data = self._packer.Pack(data, self._version)
if isinstance(data, dict): # Instantiate from simple Python dict
# See if there is a manually specified derived type.
type_override = data.get('xsi_type')
if type_override:
elem_type = self._DiscoverElementTypeFromLocalname(type_override)
else:
elem_type = elem.type
data_formatted = data.iteritems()
packed_result = self._CreateComplexTypeFromData(
elem_type, type_override is not None, data_formatted, set_type_attrs)
elif isinstance(data, zeep.xsd.CompoundValue):
# Here the data is already a SOAP element but we still need to look
# through it in case it has been edited with Python dicts.
elem_type = data._xsd_type
data_formatted = zip(dir(data), [data[k] for k in dir(data)])
packed_result = self._CreateComplexTypeFromData(
elem_type, False, data_formatted, set_type_attrs)
elif isinstance(data, (list, tuple)):
packed_result = [self._PackArgumentsHelper(elem, item, set_type_attrs)
for item in data]
else:
if elem.type.name == 'base64Binary' and self._IsBase64(data):
_logger.warn('Passing data to base64 field %s that may '
'already be encoded. Do not pre-encode base64 '
'fields with zeep.', elem.name)
packed_result = data
return packed_result | 152,894 |
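For reference, the kind of user-supplied structure this helper is designed to unpack: the 'xsi_type' key selects the derived SOAP type and the remaining keys become its fields (the amounts are placeholders).

bidding_strategy_configuration = {
    'bids': [{
        'xsi_type': 'CpcBid',  # Chooses the concrete Bids subtype.
        'bid': {'microAmount': 10000000},
    }]
}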
Searches all namespaces for a type by name.
Args:
type_localname: The name of the type.
Returns:
A fully qualified SOAP type with the specified name.
Raises:
A zeep.exceptions.LookupError if the type cannot be found in any
namespace. | def _DiscoverElementTypeFromLocalname(self, type_localname):
elem_type = None
last_exception = None
for ns_prefix in self.zeep_client.wsdl.types.prefix_map.values():
try:
elem_type = self.zeep_client.get_type(
'{%s}%s' % (ns_prefix, type_localname))
except zeep.exceptions.LookupError as e:
last_exception = e
continue
break
if not elem_type:
raise last_exception
return elem_type | 152,895 |
Initialize a SOAP element with specific data.
Args:
elem_type: The type of the element to create.
type_is_override: A boolean specifying if the type is being overridden.
data: The data to hydrate the type with.
set_type_attrs: A boolean indicating whether or not attributes that end
in .Type should be set. This is only necessary for batch job service.
Returns:
A fully initialized SOAP element. | def _CreateComplexTypeFromData(
self, elem_type, type_is_override, data, set_type_attrs):
elem_arguments = dict(elem_type.elements)
# A post order traversal of the original data, need to instantiate from
# the bottom up.
instantiated_arguments = {
k: self._PackArgumentsHelper(elem_arguments[k], v, set_type_attrs)
for k, v in data if k != 'xsi_type'}
if set_type_attrs:
found_type_attr = next((e_name for e_name, _ in elem_type.elements
if e_name.endswith('.Type')), None)
if found_type_attr and type_is_override:
instantiated_arguments[found_type_attr] = elem_type.qname.localname
# Now go back through the tree instantiating SOAP types as we go.
return elem_type(**instantiated_arguments) | 152,896 |
Create a method wrapping an invocation to the SOAP service.
Args:
method_name: A string identifying the name of the SOAP method to call.
Returns:
A callable that can be used to make the desired SOAP request. | def _CreateMethod(self, method_name):
soap_service_method = self.zeep_client.service[method_name]
def MakeSoapRequest(*args):
AddToUtilityRegistry('zeep')
soap_headers = self._GetZeepFormattedSOAPHeaders()
packed_args = self._PackArguments(method_name, args)
try:
return soap_service_method(
*packed_args, _soapheaders=soap_headers)['body']['rval']
except zeep.exceptions.Fault as e:
error_list = ()
if e.detail is not None:
underlying_exception = e.detail.find(
'{%s}ApiExceptionFault' % self._GetBindingNamespace())
fault_type = self.zeep_client.get_element(
'{%s}ApiExceptionFault' % self._GetBindingNamespace())
fault = fault_type.parse(
underlying_exception, self.zeep_client.wsdl.types)
error_list = fault.errors or error_list
raise googleads.errors.GoogleAdsServerFault(
e.detail, errors=error_list, message=e.message)
return MakeSoapRequest | 152,898 |
Recursively display a node and each of its children.
Args:
node: The node we're displaying the children of.
children: Children of the parent node.
level: How deep in the tree we are. | def DisplayTree(node, children, level=0):
value = ''
node_type = ''
if 'caseValue' in node:
case_value = node['caseValue']
node_type = case_value['ProductDimension.Type']
if node_type == 'ProductCanonicalCondition':
value = (case_value['condition'] if 'condition' in case_value
else 'OTHER')
elif node_type == 'ProductBiddingCategory':
value = '%s(%s)' % (case_value['type'], case_value['value']
if 'value' in case_value else 'OTHER')
else:
value = (case_value['value'] if 'value' in case_value else 'OTHER')
print ('%sid: %s, node_type: %s, value: %s\n'
% (' ' * level, node['id'], node_type, value))
for child_node in children[node['id']]:
DisplayTree(child_node, children, level + 1) | 152,902 |
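A hedged driver for DisplayTree, assuming the nodes are ProductPartition criterion dicts carrying 'id', an optional 'caseValue', and a 'parentCriterionId' on non-root nodes; those field names are assumptions about the criterion shape rather than something the snippet above guarantees.

import collections

def DisplayProductPartitionTree(criteria):
  """Prints a tree from a flat list of ProductPartition criterion dicts."""
  children = collections.defaultdict(list)
  root_node = None
  for criterion in criteria:
    if 'parentCriterionId' in criterion:
      children[criterion['parentCriterionId']].append(criterion)
    else:
      root_node = criterion  # The partition with no parent is the root.
  if root_node is not None:
    DisplayTree(root_node, children)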
Download all ad units.
Args:
inventory_service: An instance of the InventoryService.
Returns:
A list containing all ad units. | def get_all_ad_units(inventory_service):
# Create a statement to get all ad units.
statement = (ad_manager.StatementBuilder(version='v201811')
.OrderBy('id', ascending=True))
# Pull down all ad units into a list
keep_iterating = True
total_results = 0
found_ad_units = []
while keep_iterating:
page = inventory_service.getAdUnitsByStatement(statement.ToStatement())
if 'results' in page and len(page['results']):
total_results = page['totalResultSetSize']
found_ad_units.extend(page['results'])
statement.offset += statement.limit
keep_iterating = statement.offset < total_results
return found_ad_units | 152,911 |
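A minimal calling sketch, assuming an Ad Manager googleads.yaml in the default location.

from googleads import ad_manager

if __name__ == '__main__':
  ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
  inventory_service = ad_manager_client.GetService(
      'InventoryService', version='v201811')
  ad_units = get_all_ad_units(inventory_service)
  print('Retrieved %d ad units.' % len(ad_units))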
Display the ad units as a tree.
Args:
root_ad_unit: The root ad unit to begin from.
all_ad_units: A list containing all ad units. | def display_hierarchy(root_ad_unit, all_ad_units):
# Create a dict mapping the ids of parents to lists of their children.
parent_id_to_children = collections.defaultdict(list)
for ad_unit in all_ad_units:
if 'parentId' in ad_unit:
parent_id_to_children[ad_unit['parentId']].append(ad_unit)
parent_id_to_children = dict(parent_id_to_children)
display_hierarchy_helper(root_ad_unit, parent_id_to_children, 0) | 152,912 |
Recursive helper for displaying the hierarchy.
Args:
root: The current root ad unit.
parent_id_to_children: The overall map of parent ids to children.
depth: The current depth. | def display_hierarchy_helper(root, parent_id_to_children, depth):
print '%s%s (%s)' % ('%s+--' % ('|'.join([' '] * depth)),
root['name'], root['id'])
# Recurse for each child of this root that has children.
for child in parent_id_to_children.get(root['id'], []):
display_hierarchy_helper(child, parent_id_to_children, depth + 1) | 152,913 |
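To tie the helpers together, a hedged end-to-end sketch: it assumes the current network exposes 'effectiveRootAdUnitId', that ad unit IDs compare directly as strings, and that the root ad unit is present in the downloaded list.

from googleads import ad_manager

def main(client):
  inventory_service = client.GetService('InventoryService', version='v201811')
  network_service = client.GetService('NetworkService', version='v201811')
  root_id = network_service.getCurrentNetwork()['effectiveRootAdUnitId']
  all_ad_units = get_all_ad_units(inventory_service)
  root_ad_unit = next(
      (unit for unit in all_ad_units if unit['id'] == root_id), None)
  if root_ad_unit is not None:
    display_hierarchy(root_ad_unit, all_ad_units)

if __name__ == '__main__':
  main(ad_manager.AdManagerClient.LoadFromStorage())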