我正在尝试从谷歌分析帐户检索数据。 到目前为止,应用程序脚本代码正在运行。
function runReport(sheetName, dimensionArr, metricArr, startDate, endDate, filters) {
  // Pulls a GA4 report one day at a time and appends all rows to a newly
  // created spreadsheet, paging past the API's default 10,000-row limit.
  //
  // Params:
  //   sheetName    - sheet to write into (created if it doesn't exist)
  //   dimensionArr - GA4 dimension API names, e.g. ['date', 'pagePath']
  //   metricArr    - GA4 metric API names, e.g. ['sessions']
  //   startDate    - inclusive start date, 'YYYY-MM-DD'
  //   endDate      - inclusive end date, 'YYYY-MM-DD'
  //   filters      - optional array of {fieldName, matchType, conditions: [...]};
  //                  every condition becomes one AND-ed string filter
  let propertyId = "XXXXXXXXXXX";
  let propertyName = "Your Property Name"; // Set your desired property name here
  // Rows per API call. runReport returns at most 250,000 rows per request and
  // pages with limit/offset — it has NO pageToken/pageSize fields, which is why
  // the original pageToken-based loop never got past the default 10K limit.
  let pageLimit = 100000;
  let spreadsheet = SpreadsheetApp.create('Google Analytics Report');
  let sheet = spreadsheet.getSheetByName(sheetName) || spreadsheet.insertSheet(sheetName);
  // Loop over each date within the specified range
  let currentDate = new Date(startDate);
  let end = new Date(endDate);
  while (currentDate <= end) {
    let formattedDate = currentDate.toISOString().split("T")[0]; // Format to YYYY-MM-DD
    let offset = 0;
    let totalRows = Infinity; // unknown until the first response supplies rowCount
    // Keep requesting pages until every matching row for this date is fetched.
    while (offset < totalRows) {
      try {
        let request = AnalyticsData.newRunReportRequest();
        // Date — a single-day range so results are paged per day.
        let dateRange = AnalyticsData.newDateRange();
        dateRange.startDate = formattedDate;
        dateRange.endDate = formattedDate;
        request.dateRanges = [dateRange]; // the API expects a repeated DateRange
        // Metric and Dimension setup
        request.metrics = metricArr.map((metricName) => {
          let metric = AnalyticsData.newMetric();
          metric.name = metricName;
          return metric;
        });
        request.dimensions = dimensionArr.map((dimensionName) => {
          let dimension = AnalyticsData.newDimension();
          dimension.name = dimensionName;
          return dimension;
        });
        // OrderBy = Date (ascending)
        let dimensionOrderBy = AnalyticsData.newDimensionOrderBy();
        dimensionOrderBy.dimensionName = 'date';
        let orderby = AnalyticsData.newOrderBy();
        orderby.dimension = dimensionOrderBy;
        orderby.desc = false;
        request.orderBys = [orderby];
        // Filter — AND together one string filter per condition.
        if (filters) {
          let dimensionfilter = AnalyticsData.newFilterExpression();
          dimensionfilter.andGroup = AnalyticsData.newFilterExpressionList();
          dimensionfilter.andGroup.expressions = [];
          for (let x = 0; x < filters.length; x++) {
            for (let j = 0; j < filters[x].conditions.length; j++) {
              let filterExpression = AnalyticsData.newFilterExpression();
              filterExpression.filter = AnalyticsData.newFilter();
              filterExpression.filter.fieldName = filters[x].fieldName;
              filterExpression.filter.stringFilter = AnalyticsData.newStringFilter();
              filterExpression.filter.stringFilter.value = filters[x].conditions[j];
              filterExpression.filter.stringFilter.matchType = filters[x].matchType;
              dimensionfilter.andGroup.expressions.push(filterExpression);
            }
          }
          request.dimensionFilter = dimensionfilter;
        }
        // Pagination: limit/offset (pageToken/pageSize are rejected by
        // analyticsdata.properties.runReport with "Unknown name" errors).
        request.limit = pageLimit;
        request.offset = offset;
        let report = AnalyticsData.Properties.runReport(request, 'properties/' + propertyId);
        // Exit if no rows are found
        if (!report.rows || !report.rows.length) {
          Logger.log(propertyName + '\tNo rows returned for date ' + formattedDate);
          break;
        }
        // rowCount is the total number of rows matching the query across ALL
        // pages, independent of limit/offset.
        totalRows = Number(report.rowCount);
        // Append the headers only if they haven't been added before
        if (sheet.getLastRow() === 0) {
          let dimensionHeaders = report.dimensionHeaders.map(dimensionHeader => dimensionHeader.name);
          let metricHeaders = report.metricHeaders.map(metricHeader => metricHeader.name);
          let headers = ['datasource', ...dimensionHeaders, ...metricHeaders];
          sheet.appendRow(headers);
        }
        // Append the results.
        let rows = report.rows.map((row) => {
          let dimensionValues = row.dimensionValues.map(dimensionValue => dimensionValue.value);
          let metricValues = row.metricValues.map(metricValue => metricValue.value);
          return [propertyName, ...dimensionValues, ...metricValues];
        });
        sheet.getRange(sheet.getLastRow() + 1, 1, rows.length, rows[0].length).setValues(rows);
        // Advance past the rows just fetched for the next page.
        offset += report.rows.length;
      } catch (e) {
        Logger.log('Failed with error: %s', e);
        break;
      }
    }
    // Move to the next date
    currentDate.setDate(currentDate.getDate() + 1);
  }
  Logger.log('%s:\tReport spreadsheet created: %s', propertyName, spreadsheet.getUrl());
}
/**
 * Entry point: fetches a two-day GA4 report (sessions and total revenue,
 * broken down by date, landing page, path, channel group and source/medium)
 * into a new spreadsheet via runReport.
 */
function main() {
  const startDate = "2022-09-02";
  const endDate = "2022-09-03";
  const dimensions = ['date','landingPage', 'pagePath', 'sessionDefaultChannelGroup','sessionSourceMedium'];
  const metrics = ['sessions', 'totalRevenue'];
  // No filter argument: runReport treats the missing sixth parameter as "no filter".
  // (The original declared an unused `filters` array that was never passed — removed.)
  runReport("Sheet1", dimensions, metrics, startDate, endDate);
}
但是,接收的数据仅限于每天请求的 10K 行。 我想克服每天(日期)10K 行的限制并从 API 中提取更多数据。
当我尝试使用 PageToken 在一天内检索超过 10K 行时,限制仍然存在。更重要的是,当我尝试添加 PageSize 参数时,收到了以下错误:
GoogleJsonResponseException:对analyticsdata.properties.runReport的API调用失败并出现错误:收到无效的JSON负载。未知名称“pageSize”:找不到字段。
知道如何纠正当前代码吗?
这个想法是通过获取所有数据(每 10K 10K,直到从一天/日期提取所有数据)来让循环工作,然后根据脚本上设置的日期移动到下一天。
谢谢
我觉得你有一点误解:可以请求的行数并没有你以为的那个硬性上限。
报告请求有一个名为 limit 的参数。默认设置为 10k
如果未指定,则返回 10,000 行。无论您请求多少行,API 每个请求最多返回 250,000 行。
报告的每一页最多返回 limit 指定数量的行;如果报告中还有更多行,你就需要使用 limit 和 offset 这两个参数对后续页面进行分页。
例如“limit”:100000,“offset”:100000将返回从行号100k开始的接下来的100k行
因此,您的解决方案是分页以从 api 获取更多数据。