Columns: docstring (string, lengths 52–499) · function (string, lengths 67–35.2k) · __index_level_0__ (int64, 52.6k–1.16M). Each row below is a docstring, the corresponding function source, and the row index.
Create a campaign group. Args: client: an AdWordsClient instance. Returns: The integer ID of the created campaign group.
def _CreateCampaignGroup(client):
  # Get the CampaignGroupService.
  campaign_group_service = client.GetService('CampaignGroupService',
                                             version='v201809')
  # Create the operation.
  operations = [{
      'operator': 'ADD',
      # Create the campaign group.
      'operand': {
          'name': 'Mars campaign group #%s' % uuid.uuid4()
      }
  }]
  campaign_group = campaign_group_service.mutate(operations)['value'][0]
  campaign_group_id = campaign_group['id']
  # Display the results.
  print 'Campaign group with ID "%d" and name "%s" was created.' % (
      campaign_group_id, campaign_group['name'])
  return campaign_group_id
152,915
Adds multiple campaigns to a campaign group. Args: client: an AdWordsClient instance. campaign_group_id: an integer ID for the campaign group. campaign_ids: a list of integer IDs for campaigns.
def _AddCampaignsToGroup(client, campaign_group_id, campaign_ids):
  # Get the CampaignService.
  campaign_service = client.GetService('CampaignService', version='v201809')
  # Create the operations.
  operations = [{
      'operator': 'SET',
      'operand': {
          'id': campaign_id,
          'campaignGroupId': campaign_group_id
      }
  } for campaign_id in campaign_ids]
  campaign_service.mutate(operations)
  # Display the results.
  print ('The following campaign IDs were added to the campaign group with ID '
         '"%d":\n\t%s' % (campaign_group_id, campaign_ids))
152,916
Creates a performance target for the campaign group. Args: client: an AdWordsClient instance. campaign_group_id: an integer ID for the campaign group.
def _CreatePerformanceTarget(client, campaign_group_id):
  # Get the CampaignGroupPerformanceTargetService.
  cgpt_service = client.GetService('CampaignGroupPerformanceTargetService',
                                   version='v201809')
  # Create the operation.
  operations = [{
      'operator': 'ADD',
      # Create the performance target.
      'operand': {
          'campaignGroupId': campaign_group_id,
          'performanceTarget': {
              # Keep the CPC for the campaigns at or below $3.
              'efficiencyTargetType': 'CPC_LESS_THAN_OR_EQUAL_TO',
              'efficiencyTargetValue': 3000000,
              # Keep the maximum spend under $500.
              'spendTargetType': 'MAXIMUM',
              'spendTarget': {
                  'microAmount': 500000000
              },
              # Aim for at least 3000 clicks.
              'volumeGoalType': 'MAXIMIZE_CLICKS',
              'volumeTargetValue': 3000,
              # Start the performance target today, and run it for the next 90
              # days.
              'startDate': datetime.datetime.now().strftime('%Y%m%d'),
              'endDate': (datetime.datetime.now() +
                          datetime.timedelta(90)).strftime('%Y%m%d')
          }
      }
  }]
  cgpt = cgpt_service.mutate(operations)['value'][0]
  # Display the results.
  print ('Campaign performance target with ID "%d" was added for campaign '
         'group ID "%d".' % (cgpt['id'], cgpt['campaignGroupId']))
152,917
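Money fields in these examples, like efficiencyTargetValue and microAmount above, are expressed in micros (1,000,000 micros = 1 unit of account currency). A quick sanity-check sketch (the helper name to_micros is illustrative, not part of the library):

def to_micros(units):
    # 1 currency unit == 1,000,000 micros.
    return int(units * 1000000)

assert to_micros(3) == 3000000      # the $3 CPC ceiling above
assert to_micros(500) == 500000000  # the $500 spend target above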
Returns the SOAP headers required for request authorization. Args: create_method: The SOAP library specific method used to instantiate SOAP objects. Returns: A SOAP object containing the headers.
def GetSOAPHeaders(self, create_method):
  header = create_method(self._SOAP_HEADER_CLASS)
  header.networkCode = self._ad_manager_client.network_code
  header.applicationName = ''.join([
      self._ad_manager_client.application_name,
      googleads.common.GenerateLibSig(self._PRODUCT_SIG)])
  return header
152,933
Pack the given object using Ad Manager-specific logic. Args: obj: an object to be packed for SOAP using Ad Manager-specific logic, if applicable. version: the version of the current API, e.g. 'v201811' Returns: The given object packed with Ad Manager-specific logic for SOAP, if applicable. Otherwise, returns the given object unmodified.
def Pack(cls, obj, version):
  if isinstance(obj, (datetime.datetime, datetime.date)):
    return cls.AdManagerDateTimePacker(obj, version)
  return obj
152,934
Returns dicts formatted for Ad Manager SOAP based on date/datetime. Args: value: A date or datetime object to be converted. version: the version of the current API, e.g. 'v201811' Returns: The value object correctly represented for Ad Manager SOAP.
def AdManagerDateTimePacker(cls, value, version):
  if isinstance(value, datetime.datetime):
    if value.tzinfo is None:
      raise googleads.errors.GoogleAdsValueError(
          'Datetime %s is not timezone aware.' % value)
    return {
        'date': cls.AdManagerDateTimePacker(value.date(), version),
        'hour': value.hour,
        'minute': value.minute,
        'second': value.second,
        # As of v201811, timeZoneID was renamed timeZoneId.
        'timeZoneId' if version >= 'v201811' else 'timeZoneID':
            value.tzinfo.zone,
    }
  elif isinstance(value, datetime.date):
    return {'year': value.year, 'month': value.month, 'day': value.day}
152,935
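A usage sketch for the packer above, assuming pytz is available and the classmethod lives on a packer class as shown (the class name AdManagerPacker is an assumption for illustration):

import datetime
import pytz

aware = pytz.timezone('America/New_York').localize(
    datetime.datetime(2019, 1, 15, 9, 30, 0))
packed = AdManagerPacker.AdManagerDateTimePacker(aware, 'v201811')
# packed == {'date': {'year': 2019, 'month': 1, 'day': 15},
#            'hour': 9, 'minute': 30, 'second': 0,
#            'timeZoneId': 'America/New_York'}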
Converts a single python value to its PQL representation. Args: value: A python value. version: A string identifying the Ad Manager version the value object is compatible with. This defaults to what is currently the latest version. This will be updated in future releases to point to what is then the latest version. Returns: The value formatted for PQL statements which are compatible with a particular API version.
def GetValueRepresentation(cls, value,
                           version=sorted(_SERVICE_MAP.keys())[-1]):
  if isinstance(value, str) or isinstance(value, unicode):
    return {'value': value, 'xsi_type': 'TextValue'}
  elif isinstance(value, bool):
    return {'value': value, 'xsi_type': 'BooleanValue'}
  elif isinstance(value, numbers.Number):
    return {'value': value, 'xsi_type': 'NumberValue'}
  # It's important that datetime is checked for before date
  # because isinstance(datetime.datetime.now(), datetime.date) is True.
  elif isinstance(value, datetime.datetime):
    if value.tzinfo is None:
      raise googleads.errors.GoogleAdsValueError(
          'Datetime %s is not timezone aware.' % value)
    return {
        'xsi_type': 'DateTimeValue',
        'value': {
            'date': {
                'year': value.year,
                'month': value.month,
                'day': value.day,
            },
            'hour': value.hour,
            'minute': value.minute,
            'second': value.second,
            'timeZoneId' if version >= 'v201811' else 'timeZoneID':
                value.tzinfo.zone,
        }
    }
  elif isinstance(value, datetime.date):
    return {
        'xsi_type': 'DateValue',
        'value': {
            'year': value.year,
            'month': value.month,
            'day': value.day,
        }
    }
  elif isinstance(value, list):
    if value and not all(isinstance(x, type(value[0])) for x in value):
      raise googleads.errors.GoogleAdsValueError('Cannot pass more than one '
                                                 'type in a set.')
    return {
        'xsi_type': 'SetValue',
        'values': [cls.GetValueRepresentation(v, version) for v in value]
    }
  else:
    raise googleads.errors.GoogleAdsValueError(
        'Can\'t represent unknown type: %s.' % type(value))
152,937
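To illustrate the dispatch above, a few hedged input/output pairs (PQLHelper as the assumed enclosing class, using the default version):

PQLHelper.GetValueRepresentation('DISPLAY')
# -> {'value': 'DISPLAY', 'xsi_type': 'TextValue'}
PQLHelper.GetValueRepresentation(True)
# -> BooleanValue; bool is checked before NumberValue because bool is a
#    numbers.Number subclass.
PQLHelper.GetValueRepresentation(42)
# -> {'value': 42, 'xsi_type': 'NumberValue'}
PQLHelper.GetValueRepresentation([1, 2, 3])
# -> {'xsi_type': 'SetValue',
#     'values': [{'value': 1, 'xsi_type': 'NumberValue'}, ...]}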
Runs a report, then waits (blocks) for the report to finish generating. Args: report_job: The report job to wait for. This may be a dictionary or an instance of the SOAP ReportJob class. Returns: The completed report job's ID as a string. Raises: An AdManagerReportError if the report job fails to complete.
def WaitForReport(self, report_job):
  service = self._GetReportService()
  report_job_id = service.runReportJob(report_job)['id']
  if self._version > 'v201502':
    status = service.getReportJobStatus(report_job_id)
  else:
    status = service.getReportJob(report_job_id)['reportJobStatus']
  while status != 'COMPLETED' and status != 'FAILED':
    _data_downloader_logger.debug('Report job status: %s', status)
    time.sleep(30)
    if self._version > 'v201502':
      status = service.getReportJobStatus(report_job_id)
    else:
      status = service.getReportJob(report_job_id)['reportJobStatus']
  if status == 'FAILED':
    raise googleads.errors.AdManagerReportError(report_job_id)
  else:
    _data_downloader_logger.debug('Report has completed successfully')
    return report_job_id
152,941
Downloads the results of a PQL query to a list. Args: pql_query: str a statement filter to apply (the query should not include the limit or the offset) [optional] values: A dict of python objects or a list of raw SOAP values to bind to the pql_query. Returns: a list of lists with the first being the header row and each subsequent list being a row of results.
def DownloadPqlResultToList(self, pql_query, values=None):
  results = []
  self._PageThroughPqlSet(pql_query, results.append, values)
  return results
152,943
Downloads the results of a PQL query to CSV. Args: pql_query: str a statement filter to apply (the query should not include the limit or the offset) file_handle: file the file object to write to. [optional] values: A dict of python objects or a list of raw SOAP values to bind to the pql_query.
def DownloadPqlResultToCsv(self, pql_query, file_handle, values=None):
  pql_writer = csv.writer(file_handle, delimiter=',', quotechar='"',
                          quoting=csv.QUOTE_ALL)
  self._PageThroughPqlSet(pql_query, pql_writer.writerow, values)
152,944
Sanitizes a field value from a Value object to a CSV suitable format. Args: pql_value: dict a dictionary containing the data for a single field of an entity. Returns: str a CSV writer friendly value formatted by Value.Type.
def _ConvertValueForCsv(self, pql_value):
  if 'value' in pql_value:
    field = pql_value['value']
  elif 'values' in pql_value:
    field = pql_value['values']
  else:
    field = None
  if field:
    if isinstance(field, list):
      if all(AdManagerClassType(single_field) == AdManagerClassType(field[0])
             for single_field in field):
        return ','.join([
            '"%s"' % str(self._ConvertValueForCsv(single_field))
            for single_field in field])
      else:
        raise googleads.errors.GoogleAdsValueError(
            'The set value returned contains unsupported mix value types')
    class_type = AdManagerClassType(pql_value)
    if class_type == 'TextValue':
      s = field.replace('"', '""')
      # Encode UTF-8 characters for Python 2 only.
      if sys.version_info.major < 3:
        s = s.encode('UTF8')
      return s
    elif class_type == 'NumberValue':
      return float(field) if '.' in field else int(field)
    elif class_type == 'DateTimeValue':
      return self._ConvertDateTimeToOffset(field)
    elif class_type == 'DateValue':
      return datetime.date(int(field['date']['year']),
                           int(field['date']['month']),
                           int(field['date']['day'])).isoformat()
    else:
      return field
  else:
    return '-'
152,945
Pages through a pql_query and performs an action (output_function). Args: pql_query: str a statement filter to apply (the query should not include the limit or the offset) output_function: the function to call to output the results (csv or in memory) values: A dict of python objects or a list of raw SOAP values to bind to the pql_query.
def _PageThroughPqlSet(self, pql_query, output_function, values):
  if isinstance(values, dict):
    values = PQLHelper.GetQueryValuesFromDict(values, self._version)
  pql_service = self._GetPqlService()
  current_offset = 0
  while True:
    query_w_limit_offset = '%s LIMIT %d OFFSET %d' % (
        pql_query, SUGGESTED_PAGE_LIMIT, current_offset)
    response = pql_service.select({'query': query_w_limit_offset,
                                   'values': values})
    if 'rows' in response:
      # Write the header row only on the first pull.
      if current_offset == 0:
        header = response['columnTypes']
        output_function([label['labelName'] for label in header])
      entities = response['rows']
      result_set_size = len(entities)
      for entity in entities:
        output_function([self._ConvertValueForCsv(value)
                         for value in entity['values']])
      current_offset += result_set_size
      if result_set_size != SUGGESTED_PAGE_LIMIT:
        break
    else:
      break
152,946
Converts the PQL formatted response for a dateTime object. Output conforms to ISO 8601 format, e.g. 'YYYY-MM-DDTHH:MM:SSz'. Args: date_time_value: dict The date time value from the PQL response. Returns: str: A string representation of the date time value uniform to ReportService.
def _ConvertDateTimeToOffset(self, date_time_value):
  date_time_obj = datetime.datetime(int(date_time_value['date']['year']),
                                    int(date_time_value['date']['month']),
                                    int(date_time_value['date']['day']),
                                    int(date_time_value['hour']),
                                    int(date_time_value['minute']),
                                    int(date_time_value['second']))
  # v201808 is the last Ad Manager version to use timeZoneID.
  if self._version > 'v201808':
    time_zone_str = 'timeZoneId'
  else:
    time_zone_str = 'timeZoneID'
  date_time_str = pytz.timezone(
      date_time_value[time_zone_str]).localize(date_time_obj).isoformat()
  if date_time_str[-5:] == '00:00':
    return date_time_str[:-6] + 'Z'
  else:
    return date_time_str
152,947
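The trailing-offset normalization at the end of _ConvertDateTimeToOffset can be exercised on its own; a minimal sketch assuming pytz:

import datetime
import pytz

dt = pytz.utc.localize(datetime.datetime(2019, 3, 1, 12, 0, 0))
iso = dt.isoformat()          # '2019-03-01T12:00:00+00:00'
if iso[-5:] == '00:00':
    iso = iso[:-6] + 'Z'      # '2019-03-01T12:00:00Z'
print(iso)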
Add a new BatchJob to upload operations to. Args: client: an instantiated AdWordsClient used to retrieve the BatchJob. Returns: The new BatchJob created by the request.
def AddBatchJob(client):
  # Initialize appropriate service.
  batch_job_service = client.GetService('BatchJobService', version='v201809')
  # Create a BatchJob.
  batch_job_operations = [{
      'operand': {},
      'operator': 'ADD'
  }]
  return batch_job_service.mutate(batch_job_operations)['value'][0]
152,950
Builds the operations adding a Keyword Criterion to each AdGroup. Args: adgroup_id: an integer identifying an AdGroup to associate the keywords with. Returns: a list containing the operations that will create a new Keyword Criterion associated with each provided AdGroup.
def BuildAdGroupCriterionOperations(adgroup_id):
  criterion_operations = [
      {
          # You must specify the xsi_type of operations run by the
          # BatchJobService.
          'xsi_type': 'AdGroupCriterionOperation',
          'operand': {
              'xsi_type': 'BiddableAdGroupCriterion',
              'adGroupId': adgroup_id,
              'criterion': {
                  'xsi_type': 'Keyword',
                  # Make 10% of keywords invalid to demonstrate error handling.
                  'text': 'mars%s%s' % (uuid.uuid4(),
                                        '!!!' if i % 10 == 0 else ''),
                  'matchType': 'BROAD'
              }
          },
          'operator': 'ADD'
      } for i in range(KEYWORD_COUNT)]
  return criterion_operations
152,951
Cancels the given BatchJob. Args: client: an instantiated AdWordsClient used to cancel the BatchJob. batch_job: a BatchJob to be canceled. max_poll_attempts: an int defining the number of times the BatchJob will be checked to determine whether it has been canceled.
def CancelBatchJob(client, batch_job, max_poll_attempts=MAX_POLL_ATTEMPTS):
  batch_job_service = client.GetService('BatchJobService', 'v201809')
  batch_job['status'] = 'CANCELING'
  operation = {
      'operator': 'SET',
      'operand': batch_job
  }
  batch_job_service.mutate([operation])
  # Verify that the Batch Job cancels.
  poll_attempt = 0
  while (poll_attempt in range(max_poll_attempts) and
         batch_job['status'] != 'CANCELED'):
    sleep_interval = (30 * (2 ** poll_attempt) +
                      (random.randint(0, 10000) / 1000))
    print ('Batch Job not finished canceling, sleeping for %s seconds.'
           % sleep_interval)
    time.sleep(sleep_interval)
    batch_job = GetBatchJob(client, batch_job['id'])
    poll_attempt += 1
  if batch_job['status'] == 'CANCELED':
    print ('Batch Job with ID "%d" has been successfully canceled.' %
           batch_job['id'])
  else:
    print ('Batch Job with ID "%d" failed to cancel after polling %d times.'
           % (batch_job['id'], max_poll_attempts))
152,952
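The polling loop above backs off exponentially with jitter; the schedule in isolation (a standalone sketch, not part of the library):

import random

def backoff_seconds(poll_attempt):
    # 30s base doubled each attempt, plus up to 10s of random jitter.
    return 30 * (2 ** poll_attempt) + random.randint(0, 10000) / 1000.0

for attempt in range(4):
    print(attempt, backoff_seconds(attempt))  # ~30, ~60, ~120, ~240 seconds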
Retrieves the BatchJob with the given id. Args: client: an instantiated AdWordsClient used to retrieve the BatchJob. batch_job_id: a long identifying the BatchJob to be retrieved. Returns: The BatchJob associated with the given id.
def GetBatchJob(client, batch_job_id):
  batch_job_service = client.GetService('BatchJobService', 'v201809')
  selector = {
      'fields': ['Id', 'Status', 'DownloadUrl'],
      'predicates': [
          {
              'field': 'Id',
              'operator': 'EQUALS',
              'values': [batch_job_id]
          }
      ]
  }
  return batch_job_service.get(selector)['entries'][0]
152,953
Prints the BatchJobService response. Args: batch_job_helper: a BatchJobHelper instance. response_xml: a string containing a response from the BatchJobService.
def PrintResponse(batch_job_helper, response_xml):
  response = batch_job_helper.ParseResponse(response_xml)
  if 'rval' in response['mutateResponse']:
    for data in response['mutateResponse']['rval']:
      if 'errorList' in data:
        print 'Operation %s - FAILURE:' % data['index']
        print '\terrorType=%s' % data['errorList']['errors']['ApiError.Type']
        print '\ttrigger=%s' % data['errorList']['errors']['trigger']
        print '\terrorString=%s' % data['errorList']['errors']['errorString']
        print '\tfieldPath=%s' % data['errorList']['errors']['fieldPath']
        print '\treason=%s' % data['errorList']['errors']['reason']
      if 'result' in data:
        print 'Operation %s - SUCCESS.' % data['index']
152,955
Initializes an APIHandler. Args: client_id: The client customer id retrieved from the Developers Console. client_secret: The client secret retrieved from the Developers Console. refresh_token: The refresh token retrieved with generate_refresh_token.py. manager_account_id: The AdWords manager account Id. dev_token: The AdWords Developer Token.
def __init__(self, client_id, client_secret, refresh_token,
             manager_account_id, dev_token):
  credentials = GoogleRefreshTokenClient(client_id, client_secret,
                                         refresh_token)
  self.client = AdWordsClient(dev_token, credentials, self._USER_AGENT,
                              client_customer_id=manager_account_id,
                              cache=ZeepServiceProxy.NO_CACHE)
152,962
Create a new ad group. Args: client_customer_id: str Client Customer Id used to create the AdGroup. campaign_id: str Id of the campaign to use. name: str Name to assign to the AdGroup. status: str Status to assign to the AdGroup when it is created.
def AddAdGroup(self, client_customer_id, campaign_id, name, status):
  self.client.SetClientCustomerId(client_customer_id)
  ad_group_service = self.client.GetService('AdGroupService')
  operations = [{
      'operator': 'ADD',
      'operand': {
          'campaignId': campaign_id,
          'name': name,
          'status': status
      }
  }]
  ad_group_service.mutate(operations)
152,963
Create a new Budget with the given microAmount. Args: client_customer_id: str Client Customer Id used to create Budget. micro_amount: str The budget represented in micros. Returns: str BudgetId of the newly created Budget.
def AddBudget(self, client_customer_id, micro_amount):
  self.client.SetClientCustomerId(client_customer_id)
  budget_service = self.client.GetService('BudgetService')
  operations = [{
      'operator': 'ADD',
      'operand': {
          'name': 'Budget #%s' % time.time(),
          'amount': {
              'microAmount': micro_amount
          },
          'deliveryMethod': 'STANDARD'
      }
  }]
  return budget_service.mutate(operations)['value'][0]['budgetId']
152,964
Add a Campaign to the client account. Args: client_customer_id: str Client Customer Id to use when creating Campaign. campaign_name: str Name of the campaign to be added. ad_channel_type: str The primary serving target for the campaign's ads. budget: str a budget amount (in micros) to use.
def AddCampaign(self, client_customer_id, campaign_name, ad_channel_type,
                budget):
  self.client.SetClientCustomerId(client_customer_id)
  campaign_service = self.client.GetService('CampaignService')
  budget_id = self.AddBudget(client_customer_id, budget)
  operations = [{
      'operator': 'ADD',
      'operand': {
          'name': campaign_name,
          'status': 'PAUSED',
          'biddingStrategyConfiguration': {
              'biddingStrategyType': 'MANUAL_CPC',
              'biddingScheme': {
                  'xsi_type': 'ManualCpcBiddingScheme',
                  'enhancedCpcEnabled': 'false'
              }
          },
          'budget': {
              'budgetId': budget_id
          },
          'advertisingChannelType': ad_channel_type
      }
  }]
  campaign_service.mutate(operations)
152,965
Retrieves all AdGroups for the given campaign that haven't been removed. Args: client_customer_id: str Client Customer Id being used in API request. campaign_id: str id of the campaign for which to fetch ad groups. Returns: list List of AdGroup data objects.
def GetAdGroups(self, client_customer_id, campaign_id):
  self.client.SetClientCustomerId(client_customer_id)
  selector = {
      'fields': ['Id', 'Name', 'Status'],
      'predicates': [
          {
              'field': 'CampaignId',
              'operator': 'EQUALS',
              'values': [campaign_id]
          },
          {
              'field': 'Status',
              'operator': 'NOT_EQUALS',
              'values': ['REMOVED']
          }
      ]
  }
  adgroups = self.client.GetService('AdGroupService').get(selector)
  if int(adgroups['totalNumEntries']) > 0:
    return adgroups['entries']
  else:
    return None
152,967
Return a Budget with the associated budgetId. Args: client_customer_id: str Client Customer Id to which the budget belongs. budget_id: str id of the budget we want to examine. Returns: Budget A Budget data object.
def GetBudget(self, client_customer_id, budget_id):
  self.client.SetClientCustomerId(client_customer_id)
  selector = {
      'fields': ['BudgetId', 'BudgetName', 'BudgetStatus', 'Amount',
                 'DeliveryMethod', 'BudgetReferenceCount',
                 'IsBudgetExplicitlyShared'],
      'predicates': [
          {
              'field': 'BudgetId',
              'operator': 'EQUALS',
              'values': [budget_id]
          }
      ]
  }
  budgets = self.client.GetService('BudgetService').get(selector)
  if int(budgets['totalNumEntries']) > 0:
    return budgets['entries'][0]
  else:
    return None
152,968
Returns a client account's Campaigns that haven't been removed. Args: client_customer_id: str Client Customer Id used to retrieve Campaigns. Returns: list List of Campaign data objects.
def GetCampaigns(self, client_customer_id):
  self.client.SetClientCustomerId(client_customer_id)
  # A somewhat hackish workaround for "The read operation timed out" error,
  # which could be triggered on AppEngine's end if the request is too large
  # and is taking too long.
  max_tries = 3
  today = time.strftime('%Y%m%d', time.localtime())
  for i in xrange(1, max_tries + 1):
    try:
      selector = {
          'fields': ['Id', 'Name', 'Status', 'BudgetId', 'Amount'],
          'predicates': [
              {
                  'field': 'Status',
                  'operator': 'NOT_EQUALS',
                  'values': ['REMOVED']
              }
          ],
          'dateRange': {
              'min': today,
              'max': today
          }
      }
      campaigns = self.client.GetService('CampaignService').get(selector)
      if int(campaigns['totalNumEntries']) > 0:
        return campaigns['entries']
      else:
        return None
    except Exception, e:
      if i == max_tries:
        raise GoogleAdsError(e)
      continue
152,969
Update a Budget with the given budgetId. Args: client_customer_id: str Client Customer Id used to update Budget. budget_id: str Id of the budget to be updated. micro_amount: str New value for the microAmount field. delivery_method: str New value for the deliveryMethod field.
def UpdateBudget(self, client_customer_id, budget_id, micro_amount,
                 delivery_method):
  self.client.SetClientCustomerId(client_customer_id)
  operations = [{
      'operator': 'SET',
      'operand': {
          'budgetId': budget_id,
          'amount': {
              'microAmount': micro_amount
          },
          'deliveryMethod': delivery_method
      }
  }]
  self.client.GetService('BudgetService').mutate(operations)
152,970
Creates the extension setting for a list of Feed Items. Args: client: an AdWordsClient instance. feed_items: the list of all Feed Items. campaign_feed: the original Campaign Feed. feed_item_ids: the Ids of the feed items for which extension settings should be created. platform_restrictions: an optional Platform Restriction for the Feed items.
def CreateExtensionSetting(client, feed_items, campaign_feed, feed_item_ids,
                           platform_restrictions=None):
  campaign_extension_setting_service = client.GetService(
      'CampaignExtensionSettingService', 'v201809')
  extension_feed_items = [
      CreateSitelinkFeedItem(feed_items, feed_item_id)
      for feed_item_id in feed_item_ids]
  extension_setting = {
      'extensions': extension_feed_items
  }
  if platform_restrictions:
    extension_setting['platformRestrictions'] = platform_restrictions
  campaign_extension_setting = {
      'campaignId': campaign_feed['campaignId'],
      'extensionType': 'SITELINK',
      'extensionSetting': extension_setting
  }
  operation = {
      'operand': campaign_extension_setting,
      'operator': 'ADD'
  }
  campaign_extension_setting_service.mutate([operation])
152,972
Creates a Sitelink Feed Item. Args: feed_items: a list of all Feed Items. feed_item_id: the Id of a specific Feed Item for which a Sitelink Feed Item should be created. Returns: The new Sitelink Feed Item.
def CreateSitelinkFeedItem(feed_items, feed_item_id):
  site_link_from_feed = feed_items[feed_item_id]
  site_link_feed_item = {
      'sitelinkText': site_link_from_feed['text'],
      'sitelinkLine2': site_link_from_feed['line2'],
      'sitelinkLine3': site_link_from_feed['line3'],
  }
  if 'finalUrls' in site_link_from_feed and site_link_from_feed['finalUrls']:
    site_link_feed_item['sitelinkFinalUrls'] = {
        'urls': site_link_from_feed['finalUrls']
    }
    if 'finalMobileUrls' in site_link_from_feed:
      site_link_feed_item['sitelinkFinalMobileUrls'] = {
          'urls': site_link_from_feed['finalMobileUrls']
      }
    site_link_feed_item['sitelinkTrackingUrlTemplate'] = (
        site_link_from_feed['trackingUrlTemplate'])
  else:
    site_link_feed_item['sitelinkUrl'] = site_link_from_feed['url']
  return site_link_feed_item
152,973
Deletes a campaign feed. Args: client: an AdWordsClient instance. campaign_feed: the campaign feed to delete.
def DeleteCampaignFeed(client, campaign_feed):
  campaign_feed_service = client.GetService('CampaignFeedService', 'v201809')
  operation = {
      'operand': campaign_feed,
      'operator': 'REMOVE'
  }
  campaign_feed_service.mutate([operation])
152,974
Deletes the old feed items for which extension settings have been created. Args: client: an AdWordsClient instance. feed_item_ids: a list of Feed Item Ids. feed: the Feed containing the given Feed Item Ids.
def DeleteOldFeedItems(client, feed_item_ids, feed):
  if not feed_item_ids:
    return
  feed_item_service = client.GetService('FeedItemService', 'v201809')
  operations = [{
      'operator': 'REMOVE',
      'operand': {
          'feedId': feed['id'],
          'feedItemId': feed_item_id
      }
  } for feed_item_id in feed_item_ids]
  feed_item_service.mutate(operations)
152,975
Get a list of Feed Item Ids used by a campaign via a given Campaign Feed. Args: client: an AdWordsClient instance. feed: a Campaign Feed. placeholder_type: the Placeholder Type. Returns: A list of Feed Item Ids.
def GetCampaignFeeds(client, feed, placeholder_type):
  campaign_feed_service = client.GetService('CampaignFeedService', 'v201809')
  campaign_feeds = []
  more_pages = True
  selector = {
      'fields': ['CampaignId', 'MatchingFunction', 'PlaceholderTypes'],
      'predicates': [
          {
              'field': 'Status',
              'operator': 'EQUALS',
              'values': ['ENABLED']
          },
          {
              'field': 'FeedId',
              'operator': 'EQUALS',
              'values': [feed['id']]
          },
          {
              'field': 'PlaceholderTypes',
              'operator': 'CONTAINS_ANY',
              'values': [placeholder_type]
          }
      ],
      'paging': {
          'startIndex': 0,
          'numberResults': PAGE_SIZE
      }
  }
  while more_pages:
    page = campaign_feed_service.get(selector)
    if 'entries' in page:
      campaign_feeds.extend(page['entries'])
    selector['paging']['startIndex'] += PAGE_SIZE
    more_pages = selector['paging']['startIndex'] < int(
        page['totalNumEntries'])
  return campaign_feeds
152,976
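GetCampaignFeeds above, and GetFeeds, GetFeedItems, and GetFeedMapping below, all repeat the same startIndex paging idiom; a hedged generic sketch of that loop, assuming the dict-shaped selectors and responses used in these examples (the helper name PageThroughEntries is illustrative):

def PageThroughEntries(service, selector, page_size):
  # Advance startIndex one page at a time until totalNumEntries is exhausted.
  entries = []
  while True:
    page = service.get(selector)
    if 'entries' in page:
      entries.extend(page['entries'])
    selector['paging']['startIndex'] += page_size
    if selector['paging']['startIndex'] >= int(page['totalNumEntries']):
      return entries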
Returns a list of all enabled Feeds. Args: client: an AdWordsClient instance. Returns: A list containing all enabled Feeds.
def GetFeeds(client):
  feed_service = client.GetService('FeedService', 'v201809')
  feeds = []
  more_pages = True
  selector = {
      'fields': ['Id', 'Name', 'Attributes'],
      'predicates': [
          {
              'field': 'Origin',
              'operator': 'EQUALS',
              'values': ['USER']
          },
          {
              'field': 'FeedStatus',
              'operator': 'EQUALS',
              'values': ['ENABLED']
          }
      ],
      'paging': {
          'startIndex': 0,
          'numberResults': PAGE_SIZE
      }
  }
  while more_pages:
    page = feed_service.get(selector)
    if 'entries' in page:
      feeds.extend(page['entries'])
    selector['paging']['startIndex'] += PAGE_SIZE
    more_pages = selector['paging']['startIndex'] < int(
        page['totalNumEntries'])
  return feeds
152,977
Returns the Feed Items for a given Feed. Args: client: an AdWordsClient instance. feed: the Feed we are retrieving Feed Items from. Returns: The Feed Items associated with the given Feed.
def GetFeedItems(client, feed):
  feed_item_service = client.GetService('FeedItemService', 'v201809')
  feed_items = []
  more_pages = True
  selector = {
      'fields': ['FeedItemId', 'AttributeValues'],
      'predicates': [
          {
              'field': 'Status',
              'operator': 'EQUALS',
              'values': ['ENABLED']
          },
          {
              'field': 'FeedId',
              'operator': 'EQUALS',
              'values': [feed['id']]
          }
      ],
      'paging': {
          'startIndex': 0,
          'numberResults': PAGE_SIZE
      }
  }
  while more_pages:
    page = feed_item_service.get(selector)
    if 'entries' in page:
      feed_items.extend(page['entries'])
    selector['paging']['startIndex'] += PAGE_SIZE
    more_pages = selector['paging']['startIndex'] < int(
        page['totalNumEntries'])
  return feed_items
152,978
Gets the Feed Item Ids used by a campaign through a given Campaign Feed. Args: campaign_feed: the Campaign Feed we are retrieving Feed Item Ids from. Returns: A list of Feed Item IDs.
def GetFeedItemIdsForCampaign(campaign_feed):
  feed_item_ids = set()
  try:
    lhs_operand = campaign_feed['matchingFunction']['lhsOperand']
  except KeyError:
    lhs_operand = None
  if (lhs_operand and
      lhs_operand[0]['FunctionArgumentOperand.Type'] ==
      'RequestContextOperand'):
    request_context_operand = lhs_operand[0]
    if (request_context_operand['contextType'] == 'FEED_ITEM_ID' and
        campaign_feed['matchingFunction']['operator'] == 'IN'):
      for argument in campaign_feed['matchingFunction']['rhsOperand']:
        if argument['xsi_type'] == 'ConstantOperand':
          feed_item_ids.add(argument['longValue'])
  return feed_item_ids
152,979
Gets the Feed Mapping for a given Feed. Args: client: an AdWordsClient instance. feed: the Feed we are retrieving the Feed Mapping for. placeholder_type: the Placeholder Type we are looking for. Returns: A dictionary containing the Feed Mapping.
def GetFeedMapping(client, feed, placeholder_type):
  feed_mapping_service = client.GetService('FeedMappingService', 'v201809')
  attribute_mappings = {}
  more_pages = True
  selector = {
      'fields': ['FeedMappingId', 'AttributeFieldMappings'],
      'predicates': [
          {
              'field': 'FeedId',
              'operator': 'EQUALS',
              'values': [feed['id']]
          },
          {
              'field': 'PlaceholderType',
              'operator': 'EQUALS',
              'values': [placeholder_type]
          }
      ],
      'paging': {
          'startIndex': 0,
          'numberResults': PAGE_SIZE
      }
  }
  while more_pages:
    page = feed_mapping_service.get(selector)
    if 'entries' in page:
      # Normally, a feed attribute is mapped only to one field. However, you
      # may map it to more than one field if needed.
      for feed_mapping in page['entries']:
        for attribute_mapping in feed_mapping['attributeFieldMappings']:
          # Since attribute mappings can have multiple values for each key,
          # we use a list to store the values.
          if attribute_mapping['feedAttributeId'] in attribute_mappings:
            attribute_mappings[attribute_mapping['feedAttributeId']].append(
                attribute_mapping['fieldId'])
          else:
            attribute_mappings[attribute_mapping['feedAttributeId']] = [
                attribute_mapping['fieldId']]
    selector['paging']['startIndex'] += PAGE_SIZE
    more_pages = selector['paging']['startIndex'] < int(
        page['totalNumEntries'])
  return attribute_mappings
152,980
Get the Platform Restrictions for a given Campaign Feed. Args: campaign_feed: the Campaign Feed we are retrieving Platform Restrictions for. Returns: The Platform Restrictions for the given feed.
def GetPlatformRestrictions(campaign_feed):
  platform_restrictions = None
  if campaign_feed['matchingFunction']['operator'] == 'AND':
    for argument in campaign_feed['matchingFunction']['lhsOperand']:
      # Check if matchingFunction is EQUALS(CONTEXT.DEVICE, 'Mobile').
      if argument['value']['operator'] == 'EQUALS':
        request_context_operand = argument['value']['lhsOperand'][0]
        if (request_context_operand and
            request_context_operand == 'DEVICE_PLATFORM'):
          # This needs to be capitalized for ExtensionSettingPlatform.
          platform_restrictions = argument['value']['rhsOperand'][0].upper()
  return platform_restrictions
152,981
Gets the sitelinks from a feed. Args: client: an AdWordsClient instance. feed: the feed used to retrieve sitelinks. Returns: A dictionary mapping the feed item ID to SiteLinkFromFeed.
def GetSitelinksFromFeed(client, feed):
  # Retrieve the feed's attribute mapping.
  feed_mappings = GetFeedMapping(client, feed, PLACEHOLDER_TYPE_SITELINKS)
  feed_items = {}
  for feed_item in GetFeedItems(client, feed):
    site_link_from_feed = {}
    for attribute_value in feed_item['attributeValues']:
      if attribute_value['feedAttributeId'] in feed_mappings:
        for field_id in feed_mappings[attribute_value['feedAttributeId']]:
          if field_id == SITE_LINK_FIELDS['TEXT']:
            site_link_from_feed['text'] = attribute_value['stringValue']
          elif field_id == SITE_LINK_FIELDS['URL']:
            site_link_from_feed['url'] = attribute_value['stringValue']
          elif field_id == SITE_LINK_FIELDS['FINAL_URLS']:
            site_link_from_feed['finalUrls'] = attribute_value['stringValues']
          elif field_id == SITE_LINK_FIELDS['FINAL_MOBILE_URLS']:
            site_link_from_feed['finalMobileUrls'] = attribute_value[
                'stringValues']
          elif field_id == SITE_LINK_FIELDS['TRACKING_URL_TEMPLATE']:
            site_link_from_feed['trackingUrlTemplate'] = attribute_value[
                'stringValue']
          elif field_id == SITE_LINK_FIELDS['LINE2']:
            site_link_from_feed['line2'] = attribute_value['stringValue']
          elif field_id == SITE_LINK_FIELDS['LINE3']:
            site_link_from_feed['line3'] = attribute_value['stringValue']
          else:
            print 'No applicable Site Link Field found for Id: %s' % field_id
    feed_items[feed_item['feedItemId']] = site_link_from_feed
  return feed_items
152,982
Builds the operations adding a Keyword Criterion to each AdGroup. Args: adgroup_operations: a list containing the operations that will add AdGroups. number_of_keywords: an int defining the number of Keywords to be created. Returns: a list containing the operations that will create a new Keyword Criterion associated with each provided AdGroup.
def BuildAdGroupCriterionOperations(adgroup_operations, number_of_keywords=1):
  criterion_operations = [
      {
          # The xsi_type of the operation can usually be guessed by the API
          # because a given service only handles one type of operation.
          # However, batch jobs process operations of different types, so
          # the xsi_type must always be explicitly defined for these
          # operations.
          'xsi_type': 'AdGroupCriterionOperation',
          'operand': {
              'xsi_type': 'BiddableAdGroupCriterion',
              'adGroupId': adgroup_operation['operand']['id'],
              'criterion': {
                  'xsi_type': 'Keyword',
                  # Make 50% of keywords invalid to demonstrate error handling.
                  'text': 'mars%s%s' % (i, '!!!' if i % 2 == 0 else ''),
                  'matchType': 'BROAD'
              }
          },
          'operator': 'ADD'
      }
      for adgroup_operation in adgroup_operations
      for i in range(number_of_keywords)]
  return criterion_operations
152,993
Creates an image and uploads it to the server. Args: media_service: a SudsServiceProxy instance for AdWords's MediaService. opener: an OpenerDirector instance. url: a str URL used to load image data. Returns: The image that was successfully uploaded.
def _CreateImage(media_service, opener, url):
  # Note: The utf-8 decode is for 2to3 Python 3 compatibility.
  image_data = opener.open(url).read().decode('utf-8')
  image = {
      'type': 'IMAGE',
      'data': image_data,
      'xsi_type': 'Image'
  }
  return media_service.upload(image)[0]
152,997
Creates settings for dynamic display ad. Args: media_service: a SudsServiceProxy instance for AdWords's MediaService. opener: an OpenerDirector instance. Returns: The dynamic display ad settings.
def _CreateDynamicDisplayAdSettings(media_service, opener):
  image = _CreateImage(media_service, opener, 'https://goo.gl/dEvQeF')
  logo = {
      'type': 'IMAGE',
      'mediaId': image['mediaId'],
      'xsi_type': 'Image'
  }
  dynamic_settings = {
      'landscapeLogoImage': logo,
      'pricePrefix': 'as low as',
      'promoText': 'Free shipping!',
      'xsi_type': 'DynamicSettings',
  }
  return dynamic_settings
152,998
Creates a budget and returns its budgetId. Args: client: An AdWordsClient instance. Returns: An int budgetId for the created Budget.
def CreateBudget(client):
  budget_service = client.GetService('BudgetService', version='v201809')
  # Create a budget.
  budget = {
      'name': 'Interplanetary Cruise App Budget #%s' % uuid.uuid4(),
      'amount': {
          'microAmount': '50000000'
      },
      'deliveryMethod': 'STANDARD',
      'isExplicitlyShared': False
  }
  budget_operations = [{
      'operator': 'ADD',
      'operand': budget
  }]
  # Create the budget and return its ID.
  budget_id = budget_service.mutate(budget_operations)['value'][0]['budgetId']
  return budget_id
153,006
Sets targeting criteria for the given campaign. Args: client: An AdWordsClient instance. campaign: A suds object representing the campaign we wish to attach targeting criteria.
def SetCampaignTargetingCriteria(client, campaign):
  campaign_criterion_service = client.GetService('CampaignCriterionService')
  # Create locations. The IDs can be found in the documentation or retrieved
  # with the LocationCriterionService.
  criteria = [
      {
          'xsi_type': 'Location',
          'id': 21137  # California
      },
      {
          'xsi_type': 'Location',
          'id': 2484  # Mexico
      },
      {
          'xsi_type': 'Language',
          'id': 1000  # English
      },
      {
          'xsi_type': 'Language',
          'id': 1003  # Spanish
      }
  ]
  operations = [{
      'operator': 'ADD',
      'operand': {
          'campaignId': campaign['id'],
          'criterion': criterion
      }
  } for criterion in criteria]
  response = campaign_criterion_service.mutate(operations)
  if response and 'value' in response:
    # Display the added campaign targets.
    for criterion in response['value']:
      print ('Campaign criteria of type "%s" and id "%s" was added.'
             % (criterion['criterion']['type'],
                criterion['criterion']['id']))
153,007
Displays an account tree. Args: account: dict The account to display. accounts: dict Map from customerId to account. links: dict Map from customerId to child links. depth: int Depth of the current account in the tree.
def DisplayAccountTree(account, accounts, links, depth=0):
  prefix = '-' * depth * 2
  print '%s%s, %s' % (prefix, account['customerId'], account['name'])
  if account['customerId'] in links:
    for child_link in links[account['customerId']]:
      child_account = accounts[child_link['clientCustomerId']]
      DisplayAccountTree(child_account, accounts, links, depth + 1)
153,016
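A minimal invocation sketch for DisplayAccountTree with hand-built maps (the data shapes are inferred from the function body):

accounts = {
    1: {'customerId': 1, 'name': 'Root manager'},
    2: {'customerId': 2, 'name': 'Client A'},
}
links = {1: [{'clientCustomerId': 2}]}
DisplayAccountTree(accounts[1], accounts, links)
# Prints:
# 1, Root manager
# --2, Client A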
Creates a bidding strategy object. Args: client: AdWordsClient the client to run the example with. Returns: dict An object representing a bidding strategy.
def CreateBiddingStrategy(client):
  # Initialize appropriate service.
  bidding_strategy_service = client.GetService(
      'BiddingStrategyService', version='v201809')
  # Create a shared bidding strategy.
  shared_bidding_strategy = {
      'name': 'Maximize Clicks %s' % uuid.uuid4(),
      'biddingScheme': {
          'xsi_type': 'TargetSpendBiddingScheme',
          # Optionally set additional bidding scheme parameters.
          'bidCeiling': {
              'microAmount': '2000000'
          }
      }
  }
  # Create operation.
  operation = {
      'operator': 'ADD',
      'operand': shared_bidding_strategy
  }
  response = bidding_strategy_service.mutate([operation])
  new_bidding_strategy = response['value'][0]
  print ('Shared bidding strategy with name "%s" and ID "%s" of type "%s" '
         'was created.' %
         (new_bidding_strategy['name'], new_bidding_strategy['id'],
          new_bidding_strategy['biddingScheme']['BiddingScheme.Type']))
  return new_bidding_strategy
153,045
Creates an explicit budget to be used only to create the Campaign. Args: client: AdWordsClient the client to run the example with. Returns: dict An object representing a shared budget.
def CreateSharedBudget(client):
  # Initialize appropriate service.
  budget_service = client.GetService('BudgetService', version='v201809')
  # Create a shared budget.
  budget = {
      'name': 'Shared Interplanetary Budget #%s' % uuid.uuid4(),
      'amount': {
          'microAmount': '2000000'
      },
      'deliveryMethod': 'STANDARD',
      'isExplicitlyShared': 'true'
  }
  # Create operation.
  operation = {
      'operator': 'ADD',
      'operand': budget
  }
  response = budget_service.mutate([operation])
  return response['value'][0]
153,046
Create a Campaign with a Shared Bidding Strategy. Args: client: AdWordsClient the client to run the example with. bidding_strategy_id: string the bidding strategy ID to use. budget_id: string the shared budget ID to use. Returns: dict An object representing a campaign.
def CreateCampaignWithBiddingStrategy(client, bidding_strategy_id, budget_id):
  # Initialize appropriate service.
  campaign_service = client.GetService('CampaignService', version='v201809')
  # Create campaign.
  campaign = {
      'name': 'Interplanetary Cruise #%s' % uuid.uuid4(),
      'budget': {
          'budgetId': budget_id
      },
      'biddingStrategyConfiguration': {
          'biddingStrategyId': bidding_strategy_id
      },
      'advertisingChannelType': 'SEARCH',
      'networkSetting': {
          'targetGoogleSearch': 'true',
          'targetSearchNetwork': 'true',
          'targetContentNetwork': 'true'
      }
  }
  # Create operation.
  operation = {
      'operator': 'ADD',
      'operand': campaign
  }
  response = campaign_service.mutate([operation])
  new_campaign = response['value'][0]
  print ('Campaign with name "%s", ID "%s" and bidding scheme ID "%s" '
         'was created.' %
         (new_campaign['name'], new_campaign['id'],
          new_campaign['biddingStrategyConfiguration']['biddingStrategyId']))
  return new_campaign
153,047
Retrieves all CustomerIds in the account hierarchy. Note that your configuration file must specify a client_customer_id belonging to an AdWords manager account. Args: client: an AdWordsClient instance. Raises: Exception: if no CustomerIds could be found. Returns: A Queue instance containing all CustomerIds in the account hierarchy.
def GetCustomerIDs(client):
  # For this example, we will use ManagedCustomerService to get all IDs in
  # hierarchy that do not belong to MCC accounts.
  managed_customer_service = client.GetService('ManagedCustomerService',
                                               version='v201809')
  offset = 0
  # Get the account hierarchy for this account.
  selector = {
      'fields': ['CustomerId'],
      'predicates': [{
          'field': 'CanManageClients',
          'operator': 'EQUALS',
          'values': [False]
      }],
      'paging': {
          'startIndex': str(offset),
          'numberResults': str(PAGE_SIZE)
      }
  }
  # Using Queue to balance load between processes.
  queue = multiprocessing.Queue()
  more_pages = True
  while more_pages:
    page = managed_customer_service.get(selector)
    if page and 'entries' in page and page['entries']:
      for entry in page['entries']:
        queue.put(entry['customerId'])
    else:
      raise Exception('Can\'t retrieve any customer ID.')
    offset += PAGE_SIZE
    selector['paging']['startIndex'] = str(offset)
    more_pages = offset < int(page['totalNumEntries'])
  return queue
153,055
Initializes an AdWordsReportError. Args: code: The HTTP status code number that was returned. error: The urllib2.HTTPError (Python 2) or urllib.error.HTTPError (Python 3) describing the failure. content: The HTTP response body as a string. [optional] message: A user-friendly error message string. If one is not provided, a default message will be used.
def __init__(self, code, error, content, message=None):
  super(AdWordsReportError, self).__init__(
      message if message else ('AdWords report download failed with HTTP '
                               'status code: %s' % code))
  self.code = code
  self.error = error
  self.content = content
153,065
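A hedged sketch of handling this error around a report download (the DownloadReport call shape is assumed for illustration, not quoted from the library):

try:
    report_downloader.DownloadReport(report_definition, output_file)
except AdWordsReportError as e:
    print('Report failed with HTTP %s: %s' % (e.code, e.content))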
Initializes an AdManagerReportError. Args: report_job_id: The ID of the report job which failed.
def __init__(self, report_job_id):
  super(AdManagerReportError, self).__init__(
      'Ad Manager report job failed. The ID of the failed report is: %s'
      % report_job_id)
  self.report_job_id = report_job_id
153,067
Creates a new Display Network campaign. Args: client: an AdWordsClient instance. merchant_id: an int merchant center ID. budget_id: an int budget ID. Returns: The campaign that was successfully created.
def CreateCampaign(client, merchant_id, budget_id):
  campaign_service = client.GetService('CampaignService', 'v201809')
  campaign = {
      'name': 'Shopping campaign #%s' % uuid.uuid4(),
      # Dynamic remarketing campaigns are only available on the Google
      # Display Network.
      'advertisingChannelType': 'DISPLAY',
      'status': 'PAUSED',
      'budget': {
          'budgetId': budget_id
      },
      # This example uses a Manual CPC bidding strategy, but you should
      # select the strategy that best aligns with your sales goals. More
      # details here: https://support.google.com/adwords/answer/2472725
      'biddingStrategyConfiguration': {
          'biddingStrategyType': 'MANUAL_CPC'
      },
      'settings': [{
          'xsi_type': 'ShoppingSetting',
          # Campaigns with numerically higher priorities take precedence
          # over those with lower priorities.
          'campaignPriority': 0,
          'merchantId': merchant_id,
          # Display network campaigns do not support partition by country.
          # The only supported value is "ZZ". This signals that products
          # from all countries are available in this campaign. The actual
          # products which serve are based on the products tagged in the
          # user list entry.
          'salesCountry': 'ZZ',
          # Optional: Enable local inventory ads (items for sale in
          # physical stores.)
          'enableLocal': True,
      }]
  }
  operations = [{
      'operator': 'ADD',
      'operand': campaign
  }]
  return campaign_service.mutate(operations)['value'][0]
153,081
Creates a dynamic remarketing campaign. Args: client: an AdWordsClient instance. campaign_id: an int campaign ID. Returns: The ad group that was successfully created.
def CreateAdGroup(client, campaign_id):
  ad_group_service = client.GetService('AdGroupService', 'v201809')
  ad_group = {
      'name': 'Dynamic remarketing ad group',
      'campaignId': campaign_id,
      'status': 'ENABLED'
  }
  operations = [{
      'operator': 'ADD',
      'operand': ad_group
  }]
  return ad_group_service.mutate(operations)['value'][0]
153,082
Creates a ResponsiveDisplayAd. Args: client: an AdWordsClient instance. opener: an OpenerDirector instance. ad_group_id: an int ad group ID. Returns: The ad group ad that was successfully created.
def CreateAd(client, opener, ad_group_id):
  ad_group_ad_service = client.GetService('AdGroupAdService', 'v201809')
  media_service = client.GetService('MediaService', 'v201809')
  marketing_image_id = _CreateImage(
      media_service, opener, 'https://goo.gl/3b9Wfh')
  logo_image_id = _CreateImage(media_service, opener, 'https://goo.gl/mtt54n')
  ad = {
      'xsi_type': 'ResponsiveDisplayAd',
      # This ad format doesn't allow the creation of an image using the
      # Image.data field. An image must first be created using the
      # MediaService, and Image.mediaId must be populated when creating the
      # ad.
      'marketingImage': {
          'xsi_type': 'Image',
          'mediaId': marketing_image_id
      },
      'shortHeadline': 'Travel',
      'longHeadline': 'Travel the World',
      'description': 'Take to the air!',
      'businessName': 'Interplanetary Cruises',
      'finalUrls': ['http://www.example.com'],
      # Optional: Call to action text.
      # Valid texts: https://support.google.com/adwords/answer/7005917
      'callToActionText': 'Apply Now',
      # Optional: Set dynamic display ad settings, composed of landscape
      # logo image, promotion text, and price prefix.
      'dynamicDisplayAdSettings': CreateDynamicDisplayAdSettings(
          client, opener),
      # Optional: Create a logo image and set it to the ad.
      'logoImage': {
          'xsi_type': 'Image',
          'mediaId': logo_image_id
      },
      # Optional: Create a square marketing image and set it to the ad.
      'squareMarketingImage': {
          'xsi_type': 'Image',
          'mediaId': logo_image_id
      },
      # Whitelisted accounts only: Set color settings using hexadecimal
      # values. Set allowFlexibleColor to False if you want your ads to
      # render by always using your colors strictly.
      # 'mainColor': '#000fff',
      # 'accentColor': '#fff000',
      # 'allowFlexibleColor': False,
      # Whitelisted accounts only: Set the format setting that the ad will
      # be served in.
      # 'formatSetting': 'NON_NATIVE'
  }
  ad_group_ad = {
      'ad': ad,
      'adGroupId': ad_group_id
  }
  operations = [{
      'operator': 'ADD',
      'operand': ad_group_ad
  }]
  return ad_group_ad_service.mutate(operations)['value'][0]
153,083
Links the provided ad group and user list. Args: client: an AdWordsClient instance. ad_group_id: an int ad group ID. user_list_id: an int user list ID. Returns: The ad group criterion that was successfully created.
def AttachUserList(client, ad_group_id, user_list_id):
  ad_group_criterion_service = client.GetService(
      'AdGroupCriterionService', 'v201809')
  user_list = {
      'xsi_type': 'CriterionUserList',
      'userListId': user_list_id
  }
  ad_group_criterion = {
      'xsi_type': 'BiddableAdGroupCriterion',
      'criterion': user_list,
      'adGroupId': ad_group_id
  }
  operations = [{
      'operator': 'ADD',
      'operand': ad_group_criterion
  }]
  return ad_group_criterion_service.mutate(operations)['value'][0]
153,084
Creates dynamic display ad settings. Args: client: an AdWordsClient instance. opener: an OpenerDirector instance. Returns: A dict containing the dynamic display ad settings.
def CreateDynamicDisplayAdSettings(client, opener):
  media_service = client.GetService('MediaService', 'v201809')
  logo = {
      'xsi_type': 'Image',
      'mediaId': _CreateImage(media_service, opener, 'https://goo.gl/dEvQeF')
  }
  dynamic_settings = {
      'landscapeLogoImage': logo,
      'pricePrefix': 'as low as',
      'promoText': 'Free shipping!'
  }
  return dynamic_settings
153,085
Retrieves the scope for the given API name. Args: api_name: A string identifying the name of the API we want to retrieve a scope for. Returns: A string that is the scope for the given API name. Raises: GoogleAdsValueError: If the given api_name is invalid; accepted values are "adwords" and "ad_manager".
def GetAPIScope(api_name):
  try:
    return SCOPES[api_name]
  except KeyError:
    raise googleads.errors.GoogleAdsValueError(
        'Invalid API name "%s" provided. Acceptable values are: %s' %
        (api_name, SCOPES.keys()))
153,091
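Assuming SCOPES maps API names to their OAuth2 scope URLs (as the Raises clause implies, with keys 'adwords' and 'ad_manager'), usage is simply:

scope = GetAPIScope('adwords')
# e.g. 'https://www.googleapis.com/auth/adwords'
GetAPIScope('bogus')  # raises GoogleAdsValueError listing valid names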
Initializes a GoogleAccessTokenClient. Args: access_token: A string containing your access token. token_expiry: A datetime instance indicating when the given access token expires.
def __init__(self, access_token, token_expiry):
  self.creds = google.oauth2.credentials.Credentials(token=access_token)
  self.creds.expiry = token_expiry
153,092
Creates the feed for DSA page URLs. Args: client: an AdWordsClient instance. Returns: A _DSAFeedDetails instance containing details about the created feed.
def _CreateFeed(client):
  # Get the FeedService.
  feed_service = client.GetService('FeedService', version='v201809')
  # Create operation.
  operation = {
      # Create the feed.
      'operand': {
          'name': 'DSA Feed %s' % uuid.uuid4(),
          # Create attributes.
          'attributes': [
              {'type': 'URL_LIST', 'name': 'Page URL'},
              {'type': 'STRING_LIST', 'name': 'Label'}
          ],
          'origin': 'USER'
      },
      'operator': 'ADD'
  }
  # Add the feed.
  feed = feed_service.mutate([operation])['value'][0]
  return _DSAFeedDetails(feed['id'], feed['attributes'][0]['id'],
                         feed['attributes'][1]['id'])
153,124
Creates the feed mapping for DSA page feeds. Args: client: an AdWordsClient instance. feed_details: a _DSAFeedDetails instance.
def _CreateFeedMapping(client, feed_details):
  # Get the FeedMappingService.
  feed_mapping_service = client.GetService('FeedMappingService',
                                           version='v201809')
  # Create the operation.
  operation = {
      # Create the feed mapping.
      'operand': {
          'criterionType': DSA_PAGE_FEED_CRITERION_TYPE,
          'feedId': feed_details.feed_id,
          # Map the feedAttributeIds to the fieldId constants.
          'attributeFieldMappings': [
              {
                  'feedAttributeId': feed_details.url_attribute_id,
                  'fieldId': DSA_PAGE_URLS_FIELD_ID
              },
              {
                  'feedAttributeId': feed_details.label_attribute_id,
                  'fieldId': DSA_LABEL_FIELD_ID
              }
          ]
      },
      'operator': 'ADD'
  }
  # Add the feed mapping.
  feed_mapping_service.mutate([operation])
153,125
Creates the page URLs in the DSA page feed. Args: client: an AdWordsClient instance. feed_details: a _DSAFeedDetails instance. label_name: a str containing the page feed URL label.
def _CreateFeedItems(client, feed_details, label_name):
  # Get the FeedItemService.
  feed_item_service = client.GetService('FeedItemService', version='v201809')
  # For page feed URL recommendations and rules, see:
  # https://support.google.com/adwords/answer/7166527
  urls = ('http://www.example.com/discounts/rental-cars?id={feeditem}',
          'http://www.example.com/discounts/hotel-deals?id={feeditem}',
          'http://www.example.com/discounts/flight-deals?id={feeditem}')
  # Create the operations.
  operations = [{
      # Create the feed item.
      'operand': {
          'feedId': feed_details.feed_id,
          'attributeValues': [
              {
                  'feedAttributeId': feed_details.url_attribute_id,
                  'stringValues': [url]
              },
              {
                  'feedAttributeId': feed_details.label_attribute_id,
                  'stringValues': [label_name]
              }
          ]
      },
      'operator': 'ADD'
  } for url in urls]
  # Add the feed items.
  feed_item_service.mutate(operations)
153,126
Updates the campaign DSA setting to DSA pagefeeds. Args: client: an AdWordsClient instance. campaign_id: a str Campaign ID. feed_id: a str page Feed ID. Raises: ValueError: If the given campaign is found not to be a dynamic search ad campaign.
def _UpdateCampaignDSASetting(client, campaign_id, feed_id):
  # Get the CampaignService.
  campaign_service = client.GetService('CampaignService', version='v201809')
  selector = {
      'fields': ['Id', 'Settings'],
      'predicates': [{
          'field': 'Id',
          'operator': 'EQUALS',
          'values': [campaign_id]
      }]
  }
  response = campaign_service.get(selector)
  if response['totalNumEntries']:
    campaign = response['entries'][0]
  else:
    raise ValueError('No campaign with ID "%d" exists.' % campaign_id)
  if not campaign['settings']:
    raise ValueError('This is not a DSA campaign.')
  dsa_setting = None
  campaign_settings = campaign['settings']
  for setting in campaign_settings:
    if setting['Setting.Type'] == 'DynamicSearchAdsSetting':
      dsa_setting = setting
      break
  if dsa_setting is None:
    raise ValueError('This is not a DSA campaign.')
  dsa_setting['pageFeed'] = {
      'feedIds': [feed_id]
  }
  # Optional: Specify whether only the supplied URLs should be used with
  # your Dynamic Search Ads.
  dsa_setting['useSuppliedUrlsOnly'] = True
  operation = {
      'operand': {
          'id': campaign_id,
          'settings': campaign_settings
      },
      'operator': 'SET'
  }
  campaign_service.mutate([operation])
  print 'DSA page feed for campaign ID "%d" was updated with feed ID "%d".' % (
      campaign_id, feed_id)
153,127
Set custom targeting for the page feed URLs based on a list of labels. Args: client: an AdWordsClient instance. ad_group_id: a str AdGroup ID. label_name: a str label name. Returns: A suds.sudsobject.Object representing the newly created webpage criterion.
def _AddDSATargeting(client, ad_group_id, label_name):
  # Get the AdGroupCriterionService.
  ad_group_criterion_service = client.GetService('AdGroupCriterionService',
                                                 version='v201809')
  # Create the operation.
  operation = {
      'operand': {
          'xsi_type': 'BiddableAdGroupCriterion',
          'adGroupId': ad_group_id,
          # Create a webpage criterion.
          'criterion': {
              'xsi_type': 'Webpage',
              'parameter': {
                  'criterionName': 'Test criterion',
                  # Add a condition for label=specified_label_name.
                  'conditions': [{
                      'operand': 'CUSTOM_LABEL',
                      'argument': label_name
                  }],
              }
          },
          # Set a custom bid for this criterion.
          'biddingStrategyConfiguration': {
              'bids': [{
                  'xsi_type': 'CpcBid',
                  'bid': {
                      'microAmount': 1500000
                  }
              }]
          }
      },
      'operator': 'ADD'
  }
  criterion = ad_group_criterion_service.mutate([operation])['value'][0]
  print 'Web page criterion with ID "%d" and status "%s" was created.' % (
      criterion['criterion']['id'], criterion['userStatus'])
  return criterion
153,128
Get season-level pitching data aggregated by team. ARGUMENTS: start_season : int : first season you want data for (or the only season if you do not specify an end_season) end_season : int : final season you want data for league : "all", "nl", or "al" ind : int : =1 if you want individual season-level data, =0 if you want a team's aggregate data over all seasons in the query
def team_pitching(start_season, end_season=None, league='all', ind=1):
    if start_season is None:
        raise ValueError(
            "You need to provide at least one season to collect data for. "
            "Try team_pitching(season) or "
            "team_pitching(start_season, end_season)."
        )
    if end_season is None:
        end_season = start_season
    soup = get_soup(start_season=start_season, end_season=end_season,
                    league=league, ind=ind)
    table = get_table(soup, ind)
    table = postprocessing(table)
    return table
153,512
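Typical calls for the helper above (a sketch; argument handling as documented in the docstring):

single = team_pitching(2019)                       # one season
per_year = team_pitching(2015, 2019, league='al')  # AL only, one row per season
combined = team_pitching(2015, 2019, ind=0)        # aggregated across the span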
Get season-level Pitching Statistics for Specific Team (from Baseball-Reference) ARGUMENTS: team : str : The Team Abbreviation (i.e. 'NYY' for Yankees) of the Team you want data for start_season : int : first season you want data for (or the only season if you do not specify an end_season) end_season : int : final season you want data for
def team_pitching_bref(team, start_season, end_season=None):
    if start_season is None:
        raise ValueError(
            "You need to provide at least one season to collect data for. "
            "Try team_pitching_bref(season) or "
            "team_pitching_bref(start_season, end_season)."
        )
    if end_season is None:
        end_season = start_season

    url = "https://www.baseball-reference.com/teams/{}".format(team)

    data = []
    headings = None
    for season in range(start_season, end_season + 1):
        print("Getting Pitching Data: {} {}".format(season, team))
        stats_url = "{}/{}.shtml".format(url, season)
        response = requests.get(stats_url)
        soup = BeautifulSoup(response.content, 'html.parser')

        table = soup.find_all('table', {'id': 'team_pitching'})[0]
        if headings is None:
            headings = [row.text.strip() for row in table.find_all('th')[1:34]]

        rows = table.find_all('tr')
        for row in rows:
            cols = row.find_all('td')
            cols = [ele.text.strip() for ele in cols]
            # Removes '*' and '#' from some names.
            cols = [col.replace('*', '').replace('#', '') for col in cols]
            # Removes Team Totals and other rows.
            cols = [col for col in cols
                    if 'Totals' not in col and 'NL teams' not in col
                    and 'AL teams' not in col]
            cols.insert(2, season)
            data.append([ele for ele in cols[0:]])

    headings.insert(2, "Year")
    data = pd.DataFrame(data=data, columns=headings)
    # [:-5] # -5 to remove Team Totals and other rows (didn't work in
    # multi-year queries)
    data = data.dropna()  # Removes Row of All Nones

    return data
153,513
For given `peer` re-send sent paths. Parameters: - `route_family`: (RouteFamily) of the sent paths to re-send - `peer`: (Peer) peer for which we need to re-send sent paths
def resend_sent(self, route_family, peer):
    if peer not in self._peers.values():
        raise ValueError('Could not find given peer (%s)' % peer)

    if route_family not in SUPPORTED_GLOBAL_RF:
        raise ValueError(
            'Given route family (%s) is not supported.' % route_family
        )

    # Iterate over the global table for given afi, safi and enqueue
    # out-going routes.
    table = self._table_manager.get_global_table_by_route_family(
        route_family
    )

    for destination in table.values():
        # Check if this destination's sent - routes include this peer.
        # i.e. check if this destinations was advertised and enqueue
        # the path only if it was. If the current best-path has not been
        # advertised before, it might already have a OutgoingRoute queued
        # to be sent to the peer.
        sent_routes = destination.sent_routes
        if sent_routes is None or len(sent_routes) == 0:
            continue
        for sent_route in sent_routes:
            if sent_route.sent_peer == peer:
                # update med - if previously med was set per neighbor or
                # wasn't set at all now it could have changed and we may
                # need to set new value there
                p = sent_route.path
                if p.med_set_by_target_neighbor or p.get_pattr(
                        BGP_ATTR_TYPE_MULTI_EXIT_DISC) is None:
                    sent_route.path = \
                        clone_path_and_update_med_for_target_neighbor(
                            sent_route.path, peer.med
                        )

                ogr = OutgoingRoute(sent_route.path,
                                    for_route_refresh=True)
                peer.enque_outgoing_msg(ogr)
155,549
Returns a `Notification` message corresponding to given codes. Parameters: - `code`: (int) BGP error code - `subcode`: (int) BGP error sub-code
def notification_factory(code, subcode): notification = BGPNotification(code, subcode) if not notification.reason: raise ValueError('Invalid code/sub-code.') return notification
155,689
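A usage sketch for the factory above; the import path is an assumption based on where this helper lives in Ryu, and the codes follow RFC 4271/4486 (6 = Cease, sub-code 2 = Administrative Shutdown):

from ryu.services.protocols.bgp.utils.bgp import notification_factory

msg = notification_factory(6, 2)  # Cease / Administrative Shutdown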
Sends open message to peer and handles received messages. Parameters: - `peer`: the peer to which this protocol instance is connected to.
def _run(self, peer): # We know the peer we are connected to, we send open message. self._peer = peer self.connection_made() # We wait for peer to send messages. self._recv_loop()
155,696
Sends given message `rr_msg` to peer, updating appropriate counters and setting appropriate timers. Parameters: - rr_msg: (RouteRefresh) route refresh message to send to peer.
def _send_outgoing_route_refresh_msg(self, rr_msg): assert rr_msg.type == BGP_MSG_ROUTE_REFRESH self._protocol.send(rr_msg) LOG.debug('RouteRefresh %s>> %s', self._neigh_conf.ip_address, rr_msg) # Collect update statistics for sent refresh request. if rr_msg.demarcation == 0: self.state.incr(PeerCounterNames.SENT_REFRESH) # If SOR is sent, we set Max. EOR timer if needed. elif (rr_msg.demarcation == 1 and self._common_conf.refresh_max_eor_time != 0): eor_timer = self._common_conf.refresh_max_eor_time # Set timer to send EOR demarcation. self._spawn_after('end-of-rib-timer', eor_timer, self._enqueue_eor_msg, rr_msg) LOG.debug('Enhanced RR max. EOR timer set.')
156,706
Request route refresh to peer for given `route_families`. If no `route_families` are given, we make a request for all route families supported with this peer. Parameters: - `route_families`: list of route families to request route refresh for. If this peer is currently not in Established state, we raise an exception. If any of the `route_families` are invalid, we raise an exception.
def request_route_refresh(self, *route_families):
    # If this peer has not established a session yet
    if not self.in_established:
        raise ValueError('Peer not in established state to satisfy'
                         ' this request.')

    skip_validation = False
    # If the request is made for all route families supported for the
    # current session, we collect all route families valid for it.
    if len(route_families) == 0:
        route_families = []
        # We skip validation of route families that we collect ourselves
        # below.
        skip_validation = True
        for route_family in SUPPORTED_GLOBAL_RF:
            if self.is_mbgp_cap_valid(route_family):
                route_families.append(route_family)

    for route_family in route_families:
        if (skip_validation or
                ((route_family in SUPPORTED_GLOBAL_RF) and
                 # We ignore requests for route families that are not
                 # valid for the current session.
                 self._protocol.is_mbgp_cap_valid(route_family))):
            rr_req = BGPRouteRefresh(route_family.afi, route_family.safi)
            self.enque_outgoing_msg(rr_req)
            LOG.debug('Enqueued Route Refresh message to '
                      'peer %s for rf: %s', self, route_family)
156,709
Extracts new paths advertised in the given update message's NLRI field (plain BGP-4, i.e. IPv4 unicast). Assumes the message was validated. Parameters: - update_msg: (Update) is assumed to be checked for all bgp message errors. Extracted paths are added to the appropriate *Destination* for further processing.
def _extract_and_handle_bgp4_new_paths(self, update_msg):
    umsg_pattrs = update_msg.pathattr_map
    next_hop = update_msg.get_path_attr(BGP_ATTR_TYPE_NEXT_HOP).value

    # Nothing to do if we do not have any new NLRIs in this message.
    msg_nlri_list = update_msg.nlri
    if not msg_nlri_list:
        LOG.debug('Update message did not have any new NLRIs.')
        return

    # Create path instances for each NLRI from the update message.
    for msg_nlri in msg_nlri_list:
        LOG.debug('NLRI: %s', msg_nlri)
        new_path = bgp_utils.create_path(
            self,
            msg_nlri,
            pattrs=umsg_pattrs,
            nexthop=next_hop
        )
        LOG.debug('Extracted paths from Update msg.: %s', new_path)

        block, blocked_cause = self._apply_in_filter(new_path)

        nlri_str = new_path.nlri.formatted_nlri_str
        received_route = ReceivedRoute(new_path, self, block)
        self._adj_rib_in[nlri_str] = received_route
        self._signal_bus.adj_rib_in_changed(self, received_route)

        if not block:
            # Update appropriate table with new paths.
            tm = self._core_service.table_manager
            tm.learn_path(new_path)
        else:
            LOG.debug('prefix: %s is blocked by in-bound filter: %s',
                      msg_nlri, blocked_cause)

    # If the update message had any qualifying new paths, do some
    # book-keeping.
    if msg_nlri_list:
        # Update prefix statistics.
        self.state.incr(PeerCounterNames.RECV_PREFIXES,
                        incr_by=len(msg_nlri_list))
        # Check if we exceed max. prefixes allowed for this neighbor.
        if self._neigh_conf.exceeds_max_prefix_allowed(
                self.state.get_count(PeerCounterNames.RECV_PREFIXES)):
            LOG.error('Max. prefix allowed for this neighbor '
                      'exceeded.')
156,724
Extracts withdraws advertised in the given update message's withdrawn routes field (plain BGP-4, i.e. IPv4 unicast). Parameters: - withdraw_list: (list) withdrawn NLRIs from an update message that is assumed to be checked for all bgp message errors. Extracted withdraws are added to the appropriate *Destination* for further processing.
def _extract_and_handle_bgp4_withdraws(self, withdraw_list): msg_rf = RF_IPv4_UC w_nlris = withdraw_list if not w_nlris: # If this is EOR of some kind, handle it self._handle_eor(msg_rf) for w_nlri in w_nlris: w_path = bgp_utils.create_path( self, w_nlri, is_withdraw=True ) block, blocked_cause = self._apply_in_filter(w_path) received_route = ReceivedRoute(w_path, self, block) nlri_str = w_nlri.formatted_nlri_str if nlri_str in self._adj_rib_in: del self._adj_rib_in[nlri_str] self._signal_bus.adj_rib_in_changed(self, received_route) if not block: # Update appropriate table with withdraws. tm = self._core_service.table_manager tm.learn_path(w_path) else: LOG.debug('prefix : %s is blocked by in-bound filter: %s', nlri_str, blocked_cause)
156,725
Extracts new paths advertised in the given update message's *MpReachNlri* attribute. Assumes MPBGP capability is enabled and message was validated. Parameters: - update_msg: (Update) is assumed to be checked for all bgp message errors. - valid_rts: (iterable) current valid/configured RTs. Extracted paths are added to appropriate *Destination* for further processing.
def _extract_and_handle_mpbgp_new_paths(self, update_msg):
    umsg_pattrs = update_msg.pathattr_map
    mpreach_nlri_attr = umsg_pattrs.get(BGP_ATTR_TYPE_MP_REACH_NLRI)
    assert mpreach_nlri_attr

    msg_rf = mpreach_nlri_attr.route_family
    # Check if this route family is among supported route families.
    if msg_rf not in SUPPORTED_GLOBAL_RF:
        LOG.info('Received route for route family %s which is'
                 ' not supported. Ignoring paths from this UPDATE: %s',
                 msg_rf, update_msg)
        return

    if msg_rf in (RF_IPv4_VPN, RF_IPv6_VPN):
        # Check if we have Extended Communities Attribute.
        # TODO(PH): Check if RT_NLRI afi/safi will ever have this attribute
        ext_comm_attr = umsg_pattrs.get(BGP_ATTR_TYPE_EXTENDED_COMMUNITIES)
        # Check if at least one RT is of interest to us.
        if not ext_comm_attr:
            LOG.info('Missing Extended Communities Attribute. '
                     'Ignoring paths from this UPDATE: %s', update_msg)
            return

        msg_rts = ext_comm_attr.rt_list
        # If we do not have any RTs associated with this msg., we do not
        # extract any paths.
        if not msg_rts:
            LOG.info('Received route with no RTs. Ignoring paths in this'
                     ' UPDATE: %s', update_msg)
            return

        # If none of the RTs in the message are of interest, we do not
        # extract any paths.
        interested_rts = self._core_service.global_interested_rts
        if not interested_rts.intersection(msg_rts):
            LOG.info('Received route with RT %s that is of no interest to'
                     ' any VRFs or Peers %s.'
                     ' Ignoring paths from this UPDATE: %s',
                     msg_rts, interested_rts, update_msg)
            return

    next_hop = mpreach_nlri_attr.next_hop

    # Nothing to do if we do not have any new NLRIs in this message.
    msg_nlri_list = mpreach_nlri_attr.nlri
    if not msg_nlri_list:
        LOG.debug('Update message did not have any new MP_REACH_NLRIs.')
        return

    # Create path instances for each NLRI from the update message.
    for msg_nlri in msg_nlri_list:
        new_path = bgp_utils.create_path(
            self,
            msg_nlri,
            pattrs=umsg_pattrs,
            nexthop=next_hop
        )
        LOG.debug('Extracted paths from Update msg.: %s', new_path)

        block, blocked_cause = self._apply_in_filter(new_path)

        received_route = ReceivedRoute(new_path, self, block)
        nlri_str = msg_nlri.formatted_nlri_str
        self._adj_rib_in[nlri_str] = received_route
        self._signal_bus.adj_rib_in_changed(self, received_route)

        if not block:
            if msg_rf == RF_RTC_UC \
                    and self._init_rtc_nlri_path is not None:
                self._init_rtc_nlri_path.append(new_path)
            else:
                # Update appropriate table with new paths.
                tm = self._core_service.table_manager
                tm.learn_path(new_path)
        else:
            LOG.debug('prefix: %s is blocked by in-bound filter: %s',
                      msg_nlri, blocked_cause)

    # If the update message had any qualifying new paths, do some
    # book-keeping.
    if msg_nlri_list:
        # Update prefix statistics.
        self.state.incr(PeerCounterNames.RECV_PREFIXES,
                        incr_by=len(msg_nlri_list))
        # Check if we exceed max. prefixes allowed for this neighbor.
        if self._neigh_conf.exceeds_max_prefix_allowed(
                self.state.get_count(PeerCounterNames.RECV_PREFIXES)):
            LOG.error('Max. prefix allowed for this neighbor '
                      'exceeded.')
156,726
Extracts withdraws advertised in the given update message's *MpUnReachNlri* attribute. Assumes MPBGP capability is enabled. Parameters: - update_msg: (Update) is assumed to be checked for all bgp message errors. Extracted withdraws are added to appropriate *Destination* for further processing.
def _extract_and_handle_mpbgp_withdraws(self, mp_unreach_attr): msg_rf = mp_unreach_attr.route_family # Check if this route family is among supported route families. if msg_rf not in SUPPORTED_GLOBAL_RF: LOG.info( 'Received route family %s is not supported. ' 'Ignoring withdraw routes on this UPDATE message.', msg_rf) return w_nlris = mp_unreach_attr.withdrawn_routes if not w_nlris: # If this is EOR of some kind, handle it self._handle_eor(msg_rf) for w_nlri in w_nlris: w_path = bgp_utils.create_path( self, w_nlri, is_withdraw=True ) block, blocked_cause = self._apply_in_filter(w_path) received_route = ReceivedRoute(w_path, self, block) nlri_str = w_nlri.formatted_nlri_str if nlri_str in self._adj_rib_in: del self._adj_rib_in[nlri_str] self._signal_bus.adj_rib_in_changed(self, received_route) if not block: # Update appropriate table with withdraws. tm = self._core_service.table_manager tm.learn_path(w_path) else: LOG.debug('prefix : %s is blocked by in-bound filter: %s', w_nlri, blocked_cause)
156,727
Clones given RT NLRI `path`, and updates it with new RT_NLRI AS. Parameters: - `path`: (Path) RT_NLRI path - `new_rt_as`: AS value of cloned paths' RT_NLRI
def clone_rtcpath_update_rt_as(path, new_rt_as): assert path and new_rt_as if not path or path.route_family != RF_RTC_UC: raise ValueError('Expected RT_NLRI path') old_nlri = path.nlri new_rt_nlri = RouteTargetMembershipNLRI(new_rt_as, old_nlri.route_target) return RtcPath(path.source, new_rt_nlri, path.source_version_num, pattrs=path.pathattr_map, nexthop=path.nexthop, is_withdraw=path.is_withdraw)
157,167
Handles update of peer RT filter. Parameters: - `peer`: (Peer) whose RT filter has changed. - `new_rts`: (set) of new RTs that the peer is interested in. - `old_rts`: (set) of RTs that the peer is no longer interested in.
def _on_update_rt_filter(self, peer, new_rts, old_rts): for table in self._table_manager._global_tables.values(): if table.route_family == RF_RTC_UC: continue self._spawn('rt_filter_chg_%s' % peer, self._rt_mgr.on_rt_filter_chg_sync_peer, peer, new_rts, old_rts, table) LOG.debug('RT Filter change handler launched for route_family %s', table.route_family)
157,755
Constructor. Args: channel: A grpc.Channel.
def __init__(self, channel): self.Assist = channel.stream_stream( '/google.assistant.embedded.v1alpha2.EmbeddedAssistant/Assist', request_serializer=google_dot_assistant_dot_embedded_dot_v1alpha2_dot_embedded__assistant__pb2.AssistRequest.SerializeToString, response_deserializer=google_dot_assistant_dot_embedded_dot_v1alpha2_dot_embedded__assistant__pb2.AssistResponse.FromString, )
158,211
Pretty prints events. Prints all events that occur with two spaces between each new conversation and a single space between turns of a conversation. Args: event(event.Event): The current event to process.
def process_event(event): if event.type == EventType.ON_CONVERSATION_TURN_STARTED: print() print(event) if (event.type == EventType.ON_CONVERSATION_TURN_FINISHED and event.args and not event.args['with_follow_on_turn']): print() if event.type == EventType.ON_DEVICE_ACTION: for command, params in event.actions: print('Do command', command, 'with params', str(params))
158,212
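A sketch of how this handler is typically wired up with the google-assistant-library event loop; credential loading is elided and the variable names are placeholders:

from google.assistant.library import Assistant

# `credentials` and `device_model_id` are placeholders for values loaded
# from the OAuth2 credentials file and the device registration.
with Assistant(credentials, device_model_id) as assistant:
    for event in assistant.start():
        process_event(event)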
Constructor. Args: channel: A grpc.Channel.
def __init__(self, channel): self.Converse = channel.stream_stream( '/google.assistant.embedded.v1alpha1.EmbeddedAssistant/Converse', request_serializer=google_dot_assistant_dot_embedded_dot_v1alpha1_dot_embedded__assistant__pb2.ConverseRequest.SerializeToString, response_deserializer=google_dot_assistant_dot_embedded_dot_v1alpha1_dot_embedded__assistant__pb2.ConverseResponse.FromString, )
158,226
Read bytes from the stream and block until sample rate is achieved. Args: size: number of bytes to read from the stream.
def read(self, size):
    now = time.time()
    missing_dt = self._sleep_until - now
    if missing_dt > 0:
        time.sleep(missing_dt)
    self._sleep_until = time.time() + self._sleep_time(size)
    data = (self._wavep.readframes(size)
            if self._wavep
            else self._fp.read(size))
    # When reaching the end of the audio stream, pad the remainder with
    # silence (zeros).
    if not data:
        return b'\x00' * size
    return data
158,239
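The pacing idea behind _sleep_time, shown in isolation as a hypothetical helper (the real method is not part of this entry): reading size bytes of 16-bit mono PCM at sample_rate frames per second represents size / (sample_rate * sample_width) seconds of real time.

def sleep_time(size, sample_rate=16000, sample_width=2):
    # Seconds of real time represented by `size` bytes of PCM audio.
    return size / float(sample_rate * sample_width)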
Updates a repository to the tip of `master`. If the repository cannot be updated because a hook that is configured does not exist in `master`, this raises a RepositoryCannotBeUpdatedError Args: repo_config - A config for a repository store - The store used to clone and cache repositories tags_only - Whether to update only to tagged revisions
def _update_repo(repo_config, store, tags_only): repo_path = store.clone(repo_config['repo'], repo_config['rev']) cmd_output('git', 'fetch', cwd=repo_path) tag_cmd = ('git', 'describe', 'origin/master', '--tags') if tags_only: tag_cmd += ('--abbrev=0',) else: tag_cmd += ('--exact',) try: rev = cmd_output(*tag_cmd, cwd=repo_path)[1].strip() except CalledProcessError: tag_cmd = ('git', 'rev-parse', 'origin/master') rev = cmd_output(*tag_cmd, cwd=repo_path)[1].strip() # Don't bother trying to update if our rev is the same if rev == repo_config['rev']: return repo_config try: path = store.clone(repo_config['repo'], rev) manifest = load_manifest(os.path.join(path, C.MANIFEST_FILE)) except InvalidManifestError as e: raise RepositoryCannotBeUpdatedError(six.text_type(e)) # See if any of our hooks were deleted with the new commits hooks = {hook['id'] for hook in repo_config['hooks']} hooks_missing = hooks - {hook['id'] for hook in manifest} if hooks_missing: raise RepositoryCannotBeUpdatedError( 'Cannot update because the tip of master is missing these hooks:\n' '{}'.format(', '.join(sorted(hooks_missing))), ) # Construct a new config with the head rev new_config = repo_config.copy() new_config['rev'] = rev return new_config
158,785
Format text with color. Args: text - Text to be formatted with color if `use_color` color - The color start string use_color_setting - Whether or not to color
def format_color(text, color, use_color_setting): if not use_color_setting: return text else: return '{}{}{}'.format(color, text, NORMAL)
158,796
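A self-contained sketch of the call, assuming format_color above is in scope, with the ANSI constants (normally module-level in pre-commit) defined inline:

RED = '\033[41m'
NORMAL = '\033[m'

print(format_color('FAILED', RED, use_color_setting=True))   # colored
print(format_color('FAILED', RED, use_color_setting=False))  # plain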
Choose whether to use color based on the command argument. Args: setting - Either `auto`, `always`, or `never`
def use_color(setting): if setting not in COLOR_CHOICES: raise InvalidColorSetting(setting) return ( setting == 'always' or (setting == 'auto' and sys.stdout.isatty() and terminal_supports_color) )
158,797
Create a new instruction. Args: name (str): instruction name num_qubits (int): instruction's qubit width num_clbits (int): instructions's clbit width params (list[sympy.Basic|qasm.Node|int|float|complex|str|ndarray]): list of parameters Raises: QiskitError: when the register is not in the correct format.
def __init__(self, name, num_qubits, num_clbits, params):
    if not isinstance(num_qubits, int) or not isinstance(num_clbits, int):
        raise QiskitError("num_qubits and num_clbits must be integer.")
    if num_qubits < 0 or num_clbits < 0:
        raise QiskitError(
            "bad instruction dimensions: %d qubits, %d clbits." %
            (num_qubits, num_clbits))
    self.name = name
    self.num_qubits = num_qubits
    self.num_clbits = num_clbits

    self._params = []  # a list of gate params stored

    # tuple (ClassicalRegister, int) when the instruction has a conditional ("if")
    self.control = None
    # list of instructions (and their contexts) that this instruction is composed of
    # empty definition means opaque or fundamental instruction
    self._definition = None
    self.params = params
159,082
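A hedged construction example for the class above, assuming qiskit's qiskit.circuit.Instruction exposes this signature:

from qiskit.circuit import Instruction

# A one-qubit, zero-clbit instruction carrying a single angle parameter.
my_rot = Instruction(name='my_rot', num_qubits=1, num_clbits=0, params=[0.5])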
Two instructions are the same if they have the same name, same dimensions, and same params. Args: other (instruction): other instruction Returns: bool: are self and other equal.
def __eq__(self, other): if type(self) is not type(other) or \ self.name != other.name or \ self.num_qubits != other.num_qubits or \ self.num_clbits != other.num_clbits or \ self.definition != other.definition: return False for self_param, other_param in zip_longest(self.params, other.params): if self_param == other_param: continue try: if numpy.isclose(float(self_param), float(other_param), atol=_CUTOFF_PRECISION): continue except TypeError: pass return False return True
159,083
Shallow copy of the instruction. Args: name (str): name to be given to the copied instruction, if None then the name stays the same Returns: Instruction: a shallow copy of the current instruction, with the name updated if it was provided
def copy(self, name=None): cpy = copy.copy(self) if name: cpy.name = name return cpy
159,090
Run all the passes on a QuantumCircuit Args: circuit (QuantumCircuit): circuit to transform via all the registered passes Returns: QuantumCircuit: Transformed circuit.
def run(self, circuit): name = circuit.name dag = circuit_to_dag(circuit) del circuit for passset in self.working_list: for pass_ in passset: dag = self._do_pass(pass_, dag, passset.options) circuit = dag_to_circuit(dag) circuit.name = name return circuit
159,096
Do a pass and its "requires". Args: pass_ (BasePass): Pass to do. dag (DAGCircuit): The dag on which the pass is run. options (dict): PassManager options. Returns: DAGCircuit: The transformed dag in case of a transformation pass. The same input dag in case of an analysis pass. Raises: TranspilerError: If the pass is not a proper pass instance.
def _do_pass(self, pass_, dag, options):
    # First, do the requires of pass_
    if not options["ignore_requires"]:
        for required_pass in pass_.requires:
            dag = self._do_pass(required_pass, dag, options)

    # Run the pass itself, if not already run
    if pass_ not in self.valid_passes:
        if pass_.is_transformation_pass:
            pass_.property_set = self.fenced_property_set
            new_dag = pass_.run(dag)
            if not isinstance(new_dag, DAGCircuit):
                raise TranspilerError(
                    "Transformation passes should return a transformed dag. "
                    "The pass %s is returning a %s" %
                    (type(pass_).__name__, type(new_dag)))
            dag = new_dag
        elif pass_.is_analysis_pass:
            pass_.property_set = self.property_set
            pass_.run(FencedDAGCircuit(dag))
        else:
            raise TranspilerError("I don't know how to handle this type of pass")

        # update the valid_passes property
        self._update_valid_passes(pass_, options['ignore_preserves'])

    return dag
159,097
Removes a flow controller. Args: name (string): Name of the controller to remove. Raises: KeyError: If the controller to remove was not registered.
def remove_flow_controller(cls, name): if name not in cls.registered_controllers: raise KeyError("Flow controller not found: %s" % name) del cls.registered_controllers[name]
159,102
Apply a single qubit gate to the qubit. Args: gate(str): the single qubit gate name params(list): the operation parameters op['params'] Returns: tuple: a tuple of U gate parameters (theta, phi, lam) Raises: QiskitError: if the gate name is not valid
def single_gate_params(gate, params=None): if gate in ('U', 'u3'): return params[0], params[1], params[2] elif gate == 'u2': return np.pi / 2, params[0], params[1] elif gate == 'u1': return 0, 0, params[0] elif gate == 'id': return 0, 0, 0 raise QiskitError('Gate is not among the valid types: %s' % gate)
159,111
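A quick check of the parameter mapping, assuming single_gate_params above is in scope: 'u2' folds into a U gate with theta fixed at pi/2.

import numpy as np

theta, phi, lam = single_gate_params('u2', [0.0, np.pi])
assert (theta, phi, lam) == (np.pi / 2, 0.0, np.pi)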
Get the matrix for a single qubit. Args: gate(str): the single qubit gate name params(list): the operation parameters op['params'] Returns: array: A numpy array representing the matrix
def single_gate_matrix(gate, params=None):
    # Converting sym to floats improves the performance of the simulator 10x.
    # This is probably a FIXME since it might show bugs in the simulator.
    (theta, phi, lam) = map(float, single_gate_params(gate, params))

    return np.array([[np.cos(theta / 2),
                      -np.exp(1j * lam) * np.sin(theta / 2)],
                     [np.exp(1j * phi) * np.sin(theta / 2),
                      np.exp(1j * phi + 1j * lam) * np.cos(theta / 2)]])
159,112
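Assuming single_gate_matrix above is in scope, u2(0, pi) should reproduce the Hadamard matrix up to floating-point error:

import numpy as np

hadamard = np.array([[1, 1], [1, -1]]) / np.sqrt(2)
assert np.allclose(single_gate_matrix('u2', [0, np.pi]), hadamard)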
Build a ``DAGCircuit`` object from a ``QuantumCircuit``. Args: circuit (QuantumCircuit): the input circuit. Return: DAGCircuit: the DAG representing the input circuit.
def circuit_to_dag(circuit): dagcircuit = DAGCircuit() dagcircuit.name = circuit.name for register in circuit.qregs: dagcircuit.add_qreg(register) for register in circuit.cregs: dagcircuit.add_creg(register) for instruction, qargs, cargs in circuit.data: # Get arguments for classical control (if any) if instruction.control is None: control = None else: control = (instruction.control[0], instruction.control[1]) dagcircuit.apply_operation_back(instruction.copy(), qargs, cargs, control) return dagcircuit
159,116
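A usage sketch, assuming the converter above is in scope along with qiskit's circuit API:

from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister

qr = QuantumRegister(2)
cr = ClassicalRegister(2)
qc = QuantumCircuit(qr, cr)
qc.h(qr[0])
qc.cx(qr[0], qr[1])
qc.measure(qr, cr)
dag = circuit_to_dag(qc)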
Plot coherence data. Args: xdata: time values for the x-axis. ydata: measured P(1) values. std_error: standard error bars for ydata. fit: fit parameters, passed as fit_function(xdata, *fit). fit_function: callable used to draw the fitted curve. xunit: time unit label for the x-axis. exp_str: experiment name used in the title and legend (e.g. 'T1'). qubit_label: qubit index shown in the title. Raises: ImportError: If matplotlib is not installed.
def plot_coherence(xdata, ydata, std_error, fit, fit_function, xunit, exp_str, qubit_label): if not HAS_MATPLOTLIB: raise ImportError('The function plot_coherence needs matplotlib. ' 'Run "pip install matplotlib" before.') plt.errorbar(xdata, ydata, std_error, marker='.', markersize=9, c='b', linestyle='') plt.plot(xdata, fit_function(xdata, *fit), c='r', linestyle='--', label=(exp_str + '= %s %s' % (str(round(fit[1])), xunit))) plt.xticks(fontsize=14, rotation=70) plt.yticks(fontsize=14) plt.xlabel('time [%s]' % (xunit), fontsize=16) plt.ylabel('P(1)', fontsize=16) plt.title(exp_str + ' measurement of Q$_{%s}$' % (str(qubit_label)), fontsize=18) plt.legend(fontsize=12) plt.grid(True) plt.show()
159,144
Take the raw rb data and convert it into averages and std dev Args: raw_rb (numpy.array): m x n x l list where m is the number of seeds, n is the number of Clifford sequences and l is the number of qubits Return: numpy_array: 2 x n x l list where index 0 is the mean over seeds and index 1 is the std dev over seeds
def shape_rb_data(raw_rb): rb_data = [] rb_data.append(np.mean(raw_rb, 0)) rb_data.append(np.std(raw_rb, 0)) return rb_data
159,145
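A shape check, assuming shape_rb_data above is in scope: 5 seeds, 10 Clifford lengths and 2 qubits in; a (2, 10, 2) mean/std summary out.

import numpy as np

raw_rb = np.random.rand(5, 10, 2)
summary = shape_rb_data(raw_rb)
assert np.shape(summary) == (2, 10, 2)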
Extends a Layout with the idle nodes from coupling_map. Args: coupling_map (Coupling): directed graph representing a coupling map. layout (Layout): an existing layout. Ancilla allocation occurs if the layout is smaller than the coupling_map.
def __init__(self, coupling_map, layout=None): super().__init__() self.coupling_map = coupling_map self.layout = layout self.ancilla_name = 'ancilla'
159,152
Validates the input to state visualization functions. Args: quantum_state (ndarray): Input state / density matrix. Returns: rho: A 2d numpy array for the density matrix. Raises: VisualizationError: Invalid input.
def _validate_input_state(quantum_state): rho = np.asarray(quantum_state) if rho.ndim == 1: rho = np.outer(rho, np.conj(rho)) # Check the shape of the input is a square matrix shape = np.shape(rho) if len(shape) != 2 or shape[0] != shape[1]: raise VisualizationError("Input is not a valid quantum state.") # Check state is an n-qubit state num = int(np.log2(rho.shape[0])) if 2 ** num != rho.shape[0]: raise VisualizationError("Input is not a multi-qubit quantum state.") return rho
159,154
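Assuming _validate_input_state above is in scope, a Bell statevector is promoted to its 4x4 density matrix:

import numpy as np

bell = np.array([1, 0, 0, 1]) / np.sqrt(2)
rho = _validate_input_state(bell)
assert rho.shape == (4, 4)
assert np.isclose(np.trace(rho), 1)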
Pick a convenient layout depending on the best matching qubit connectivity, and set the property `layout`. Args: dag (DAGCircuit): DAG to find layout for. Raises: TranspilerError: if dag wider than self.coupling_map
def run(self, dag): num_dag_qubits = sum([qreg.size for qreg in dag.qregs.values()]) if num_dag_qubits > self.coupling_map.size(): raise TranspilerError('Number of qubits greater than device.') best_sub = self._best_subset(num_dag_qubits) layout = Layout() map_iter = 0 for qreg in dag.qregs.values(): for i in range(qreg.size): layout[(qreg, i)] = int(best_sub[map_iter]) map_iter += 1 self.property_set['layout'] = layout
159,159
Computes the qubit mapping with the best connectivity. Args: n_qubits (int): Number of subset qubits to consider. Returns: ndarray: Array of qubits to use for best connectivity mapping.
def _best_subset(self, n_qubits): if n_qubits == 1: return np.array([0]) device_qubits = self.coupling_map.size() cmap = np.asarray(self.coupling_map.get_edges()) data = np.ones_like(cmap[:, 0]) sp_cmap = sp.coo_matrix((data, (cmap[:, 0], cmap[:, 1])), shape=(device_qubits, device_qubits)).tocsr() best = 0 best_map = None # do bfs with each node as starting point for k in range(sp_cmap.shape[0]): bfs = cs.breadth_first_order(sp_cmap, i_start=k, directed=False, return_predecessors=False) connection_count = 0 sub_graph = [] for i in range(n_qubits): node_idx = bfs[i] for j in range(sp_cmap.indptr[node_idx], sp_cmap.indptr[node_idx + 1]): node = sp_cmap.indices[j] for counter in range(n_qubits): if node == bfs[counter]: connection_count += 1 sub_graph.append([node_idx, node]) break if connection_count > best: best = connection_count best_map = bfs[0:n_qubits] # Return a best mapping that has reduced bandwidth mapping = {} for edge in range(best_map.shape[0]): mapping[best_map[edge]] = edge new_cmap = [[mapping[c[0]], mapping[c[1]]] for c in sub_graph] rows = [edge[0] for edge in new_cmap] cols = [edge[1] for edge in new_cmap] data = [1]*len(rows) sp_sub_graph = sp.coo_matrix((data, (rows, cols)), shape=(n_qubits, n_qubits)).tocsr() perm = cs.reverse_cuthill_mckee(sp_sub_graph) best_map = best_map[perm] return best_map
159,160
The matrix power of the channel. Args: n (int): compute the matrix power of the superoperator matrix. Returns: Kraus: the matrix power of the SuperOp converted to a Kraus channel. Raises: QiskitError: if the input and output dimensions of the QuantumChannel are not equal, or the power is not an integer.
def power(self, n): if n > 0: return super().power(n) return Kraus(SuperOp(self).power(n))
159,166
Return the QuantumChannel other * self. Args: other (complex): a complex number. Returns: Kraus: the scalar multiplication other * self as a Kraus object. Raises: QiskitError: if other is not a valid scalar.
def multiply(self, other): if not isinstance(other, Number): raise QiskitError("other is not a number") # If the number is complex we need to convert to general # kraus channel so we multiply via Choi representation if isinstance(other, complex) or other < 0: # Convert to Choi-matrix return Kraus(Choi(self).multiply(other)) # If the number is real we can update the Kraus operators # directly val = np.sqrt(other) kraus_r = None kraus_l = [val * k for k in self._data[0]] if self._data[1] is not None: kraus_r = [val * k for k in self._data[1]] return Kraus((kraus_l, kraus_r), self._input_dim, self._output_dim)
159,167
Evolve a quantum state by the QuantumChannel. Args: state (QuantumState): The input statevector or density matrix. qargs (list): a list of QuantumState subsystem positions to apply the operator on. Returns: QuantumState: the output quantum state. Raises: QiskitError: if the operator dimension does not match the specified QuantumState subsystem dimensions.
def _evolve(self, state, qargs=None): # If subsystem evolution we use the SuperOp representation if qargs is not None: return SuperOp(self)._evolve(state, qargs) # Otherwise we compute full evolution directly state = self._format_state(state) if state.shape[0] != self._input_dim: raise QiskitError( "QuantumChannel input dimension is not equal to state dimension." ) if state.ndim == 1 and self._data[1] is None and len( self._data[0]) == 1: # If we only have a single Kraus operator we can implement unitary-type # evolution of a state vector psi -> K[0].psi return np.dot(self._data[0][0], state) # Otherwise we always return a density matrix state = self._format_state(state, density_matrix=True) kraus_l, kraus_r = self._data if kraus_r is None: kraus_r = kraus_l return np.einsum('AiB,BC,AjC->ij', kraus_l, state, np.conjugate(kraus_r))
159,168
Return the tensor product channel. Args: other (QuantumChannel): a quantum channel subclass. reverse (bool): If False return self ⊗ other, if True return other ⊗ self [Default: False] Returns: Kraus: the tensor product channel as a Kraus object. Raises: QiskitError: if other cannot be converted to a channel.
def _tensor_product(self, other, reverse=False): # Convert other to Kraus if not isinstance(other, Kraus): other = Kraus(other) # Get tensor matrix ka_l, ka_r = self._data kb_l, kb_r = other._data if reverse: input_dims = self.input_dims() + other.input_dims() output_dims = self.output_dims() + other.output_dims() kab_l = [np.kron(b, a) for a in ka_l for b in kb_l] else: input_dims = other.input_dims() + self.input_dims() output_dims = other.output_dims() + self.output_dims() kab_l = [np.kron(a, b) for a in ka_l for b in kb_l] if ka_r is None and kb_r is None: kab_r = None else: if ka_r is None: ka_r = ka_l if kb_r is None: kb_r = kb_l if reverse: kab_r = [np.kron(b, a) for a in ka_r for b in kb_r] else: kab_r = [np.kron(a, b) for a in ka_r for b in kb_r] data = (kab_l, kab_r) return Kraus(data, input_dims, output_dims)
159,169