push all website files

Jacob Levine
2019-01-06 13:14:45 -06:00
parent d7301e26c3
commit d2d5d4c04e
15662 changed files with 2166516 additions and 0 deletions


@@ -0,0 +1,653 @@
// Copyright 2018 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package google.cloud.bigquery.datatransfer.v1;
import "google/api/annotations.proto";
import "google/cloud/bigquery/datatransfer/v1/transfer.proto";
import "google/protobuf/duration.proto";
import "google/protobuf/empty.proto";
import "google/protobuf/field_mask.proto";
import "google/protobuf/timestamp.proto";
import "google/protobuf/wrappers.proto";
option csharp_namespace = "Google.Cloud.BigQuery.DataTransfer.V1";
option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer";
option java_multiple_files = true;
option java_outer_classname = "DataTransferProto";
option java_package = "com.google.cloud.bigquery.datatransfer.v1";
option php_namespace = "Google\\Cloud\\BigQuery\\DataTransfer\\V1";
// The Google BigQuery Data Transfer Service API enables BigQuery users to
// configure the transfer of their data from other Google products into
// BigQuery. This service exposes the end-user-facing methods that back the
// frontend.
service DataTransferService {
// Retrieves a supported data source and returns its settings,
// which can be used for UI rendering.
rpc GetDataSource(GetDataSourceRequest) returns (DataSource) {
option (google.api.http) = {
get: "/v1/{name=projects/*/locations/*/dataSources/*}"
additional_bindings {
get: "/v1/{name=projects/*/dataSources/*}"
}
};
}
// Lists supported data sources and returns their settings,
// which can be used for UI rendering.
rpc ListDataSources(ListDataSourcesRequest) returns (ListDataSourcesResponse) {
option (google.api.http) = {
get: "/v1/{parent=projects/*/locations/*}/dataSources"
additional_bindings {
get: "/v1/{parent=projects/*}/dataSources"
}
};
}
// Creates a new data transfer configuration.
rpc CreateTransferConfig(CreateTransferConfigRequest) returns (TransferConfig) {
option (google.api.http) = {
post: "/v1/{parent=projects/*/locations/*}/transferConfigs"
body: "transfer_config"
additional_bindings {
post: "/v1/{parent=projects/*}/transferConfigs"
body: "transfer_config"
}
};
}
// Updates a data transfer configuration.
// All fields must be set, even if they are not updated.
rpc UpdateTransferConfig(UpdateTransferConfigRequest) returns (TransferConfig) {
option (google.api.http) = {
patch: "/v1/{transfer_config.name=projects/*/locations/*/transferConfigs/*}"
body: "transfer_config"
additional_bindings {
patch: "/v1/{transfer_config.name=projects/*/transferConfigs/*}"
body: "transfer_config"
}
};
}
// Deletes a data transfer configuration,
// including any associated transfer runs and logs.
rpc DeleteTransferConfig(DeleteTransferConfigRequest) returns (google.protobuf.Empty) {
option (google.api.http) = {
delete: "/v1/{name=projects/*/locations/*/transferConfigs/*}"
additional_bindings {
delete: "/v1/{name=projects/*/transferConfigs/*}"
}
};
}
// Returns information about a data transfer config.
rpc GetTransferConfig(GetTransferConfigRequest) returns (TransferConfig) {
option (google.api.http) = {
get: "/v1/{name=projects/*/locations/*/transferConfigs/*}"
additional_bindings {
get: "/v1/{name=projects/*/transferConfigs/*}"
}
};
}
// Returns information about all data transfers in the project.
rpc ListTransferConfigs(ListTransferConfigsRequest) returns (ListTransferConfigsResponse) {
option (google.api.http) = {
get: "/v1/{parent=projects/*/locations/*}/transferConfigs"
additional_bindings {
get: "/v1/{parent=projects/*}/transferConfigs"
}
};
}
// Creates transfer runs for a time range [start_time, end_time].
// For each date - or whatever granularity the data source supports - in the
// range, one transfer run is created.
// Note that runs are created per UTC time in the time range.
rpc ScheduleTransferRuns(ScheduleTransferRunsRequest) returns (ScheduleTransferRunsResponse) {
option (google.api.http) = {
post: "/v1/{parent=projects/*/locations/*/transferConfigs/*}:scheduleRuns"
body: "*"
additional_bindings {
post: "/v1/{parent=projects/*/transferConfigs/*}:scheduleRuns"
body: "*"
}
};
}
// Returns information about the particular transfer run.
rpc GetTransferRun(GetTransferRunRequest) returns (TransferRun) {
option (google.api.http) = {
get: "/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}"
additional_bindings {
get: "/v1/{name=projects/*/transferConfigs/*/runs/*}"
}
};
}
// Deletes the specified transfer run.
rpc DeleteTransferRun(DeleteTransferRunRequest) returns (google.protobuf.Empty) {
option (google.api.http) = {
delete: "/v1/{name=projects/*/locations/*/transferConfigs/*/runs/*}"
additional_bindings {
delete: "/v1/{name=projects/*/transferConfigs/*/runs/*}"
}
};
}
// Returns information about running and completed jobs.
rpc ListTransferRuns(ListTransferRunsRequest) returns (ListTransferRunsResponse) {
option (google.api.http) = {
get: "/v1/{parent=projects/*/locations/*/transferConfigs/*}/runs"
additional_bindings {
get: "/v1/{parent=projects/*/transferConfigs/*}/runs"
}
};
}
// Returns user facing log messages for the data transfer run.
rpc ListTransferLogs(ListTransferLogsRequest) returns (ListTransferLogsResponse) {
option (google.api.http) = {
get: "/v1/{parent=projects/*/locations/*/transferConfigs/*/runs/*}/transferLogs"
additional_bindings {
get: "/v1/{parent=projects/*/transferConfigs/*/runs/*}/transferLogs"
}
};
}
// Returns true if valid credentials exist for the given data source and
// requesting user.
// Some data sources don't support service accounts, so we need to talk to
// them on behalf of the end user. This API just checks whether we have an
// OAuth token for the particular user, which is a prerequisite before the
// user can create a transfer config.
rpc CheckValidCreds(CheckValidCredsRequest) returns (CheckValidCredsResponse) {
option (google.api.http) = {
post: "/v1/{name=projects/*/locations/*/dataSources/*}:checkValidCreds"
body: "*"
additional_bindings {
post: "/v1/{name=projects/*/dataSources/*}:checkValidCreds"
body: "*"
}
};
}
}
// Represents a data source parameter with validation rules, so that
// parameters can be rendered in the UI. These parameters are given to us by
// supported data sources, and include all needed information for rendering
// and validation.
// Thus, whoever uses this API can decide to generate either a generic UI or
// custom data-source-specific forms.
message DataSourceParameter {
// Parameter type.
enum Type {
// Type unspecified.
TYPE_UNSPECIFIED = 0;
// String parameter.
STRING = 1;
// Integer parameter (64 bits).
// Will be serialized to JSON as a string.
INTEGER = 2;
// Double precision floating point parameter.
DOUBLE = 3;
// Boolean parameter.
BOOLEAN = 4;
// Record parameter.
RECORD = 5;
// Page ID for a Google+ Page.
PLUS_PAGE = 6;
}
// Parameter identifier.
string param_id = 1;
// Parameter display name in the user interface.
string display_name = 2;
// Parameter description.
string description = 3;
// Parameter type.
Type type = 4;
// Whether the parameter is required.
bool required = 5;
// Whether the parameter can have multiple values.
bool repeated = 6;
// Regular expression which can be used for parameter validation.
string validation_regex = 7;
// All possible values for the parameter.
repeated string allowed_values = 8;
// For integer and double values, specifies the minimum allowed value.
google.protobuf.DoubleValue min_value = 9;
// For integer and double values, specifies the maximum allowed value.
google.protobuf.DoubleValue max_value = 10;
// When parameter is a record, describes child fields.
repeated DataSourceParameter fields = 11;
// Description of the requirements for this field, in case the user input does
// not fulfill the regex pattern or min/max values.
string validation_description = 12;
// URL to a help document to further explain the naming requirements.
string validation_help_url = 13;
// Cannot be changed after initial creation.
bool immutable = 14;
// If set to true, the schema should be taken from the parent with the same
// parameter_id. Only applicable when the parameter type is RECORD.
bool recurse = 15;
}
// Represents data source metadata. Metadata is sufficient to
// render UI and request proper OAuth tokens.
message DataSource {
// The type of authorization needed for this data source.
enum AuthorizationType {
// Type unspecified.
AUTHORIZATION_TYPE_UNSPECIFIED = 0;
// Use OAuth 2 authorization codes that can be exchanged
// for a refresh token on the backend.
AUTHORIZATION_CODE = 1;
// Return an authorization code for a given Google+ page that can then be
// exchanged for a refresh token on the backend.
GOOGLE_PLUS_AUTHORIZATION_CODE = 2;
}
// Represents how the data source supports data auto refresh.
enum DataRefreshType {
// The data source won't support data auto refresh, which is the default value.
DATA_REFRESH_TYPE_UNSPECIFIED = 0;
// The data source supports data auto refresh, and runs will be scheduled
// for the past few days. Does not allow custom values to be set for each
// transfer config.
SLIDING_WINDOW = 1;
// The data source supports data auto refresh, and runs will be scheduled
// for the past few days. Allows custom values to be set for each transfer
// config.
CUSTOM_SLIDING_WINDOW = 2;
}
// Output only. Data source resource name.
string name = 1;
// Data source id.
string data_source_id = 2;
// User friendly data source name.
string display_name = 3;
// User friendly data source description string.
string description = 4;
// Data source client id which should be used to receive the refresh token.
// When not supplied, no offline credentials are populated for data transfer.
string client_id = 5;
// API auth scopes for which a refresh token needs to be obtained. Only valid
// when `client_id` is specified; ignored otherwise. These are scopes needed
// by a data source to prepare data and ingest them into BigQuery,
// e.g., https://www.googleapis.com/auth/bigquery
repeated string scopes = 6;
// Deprecated. This field has no effect.
TransferType transfer_type = 7;
// Indicates whether the data source supports multiple transfers
// to different BigQuery targets.
bool supports_multiple_transfers = 8;
// The number of seconds to wait for an update from the data source
// before BigQuery marks the transfer as failed.
int32 update_deadline_seconds = 9;
// Default data transfer schedule.
// Examples of valid schedules include:
// `1st,3rd monday of month 15:30`,
// `every wed,fri of jan,jun 13:15`, and
// `first sunday of quarter 00:00`.
string default_schedule = 10;
// Specifies whether the data source supports a user defined schedule, or
// operates on the default schedule.
// When set to `true`, the user can override the default schedule.
bool supports_custom_schedule = 11;
// Data source parameters.
repeated DataSourceParameter parameters = 12;
// URL of the help document for this data source.
string help_url = 13;
// Indicates the type of authorization.
AuthorizationType authorization_type = 14;
// Specifies whether the data source supports automatic data refresh for the
// past few days, and how it's supported.
// For some data sources, data might not be complete until a few days later,
// so it's useful to refresh data automatically.
DataRefreshType data_refresh_type = 15;
// Default data refresh window in days.
// Only meaningful when `data_refresh_type` = `SLIDING_WINDOW`.
int32 default_data_refresh_window_days = 16;
// Disables backfilling and manual run scheduling
// for the data source.
bool manual_runs_disabled = 17;
// The minimum interval for the scheduler to schedule runs.
google.protobuf.Duration minimum_schedule_interval = 18;
}
// A request to get data source info.
message GetDataSourceRequest {
// The field will contain the name of the requested resource, for example:
// `projects/{project_id}/dataSources/{data_source_id}`
string name = 1;
}
// Request to list supported data sources and their data transfer settings.
message ListDataSourcesRequest {
// The BigQuery project id for which data sources should be returned.
// Must be in the form: `projects/{project_id}`
string parent = 1;
// Pagination token, which can be used to request a specific page
// of `ListDataSourcesRequest` list results. For multiple-page
// results, `ListDataSourcesResponse` outputs
// a `next_page_token`, which can be used as the
// `page_token` value to request the next page of list results.
string page_token = 3;
// Page size. Defaults to the maximum allowed value of 1000 results.
int32 page_size = 4;
}
// Returns the list of supported data sources and their metadata.
message ListDataSourcesResponse {
// List of supported data sources and their transfer settings.
repeated DataSource data_sources = 1;
// Output only. The next-page token. For multiple-page list results,
// this token can be used as the
// `ListDataSourcesRequest.page_token`
// to request the next page of list results.
string next_page_token = 2;
}
// A request to create a data transfer configuration. If new credentials are
// needed for this transfer configuration, an authorization code must be
// provided. If an authorization code is provided, the transfer configuration
// will be associated with the user id corresponding to the
// authorization code. Otherwise, the transfer configuration will be associated
// with the calling user.
message CreateTransferConfigRequest {
// The BigQuery project id where the transfer configuration should be created.
// Must be in the format `projects/{project_id}/locations/{location_id}`.
// If the specified location does not match the location of the destination
// BigQuery dataset, the request will fail.
string parent = 1;
// Data transfer configuration to create.
TransferConfig transfer_config = 2;
// Optional OAuth2 authorization code to use with this transfer configuration.
// This is required if new credentials are needed, as indicated by
// `CheckValidCreds`.
// In order to obtain the authorization_code, please make a
// request to
// https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=<datatransferapiclientid>&scope=<data_source_scopes>&redirect_uri=<redirect_uri>
//
// * client_id should be the OAuth client_id of the BigQuery DTS API for the
// given data source, returned by the ListDataSources method.
// * data_source_scopes are the scopes returned by the ListDataSources method.
// * redirect_uri is an optional parameter. If not specified, then the
// authorization code is posted to the opener of the authorization flow
// window. Otherwise it will be sent to the redirect URI. A special value of
// urn:ietf:wg:oauth:2.0:oob means that the authorization code should be
// returned in the title bar of the browser, with the page text prompting
// the user to copy the code and paste it in the application.
string authorization_code = 3;
}
// A request to update a transfer configuration. To update the user id of the
// transfer configuration, an authorization code needs to be provided.
message UpdateTransferConfigRequest {
// Data transfer configuration to update.
TransferConfig transfer_config = 1;
// Optional OAuth2 authorization code to use with this transfer configuration.
// If it is provided, the transfer configuration will be associated with the
// authorizing user.
// In order to obtain the authorization_code, please make a
// request to
// https://www.gstatic.com/bigquerydatatransfer/oauthz/auth?client_id=<datatransferapiclientid>&scope=<data_source_scopes>&redirect_uri=<redirect_uri>
//
// * client_id should be the OAuth client_id of the BigQuery DTS API for the
// given data source, returned by the ListDataSources method.
// * data_source_scopes are the scopes returned by the ListDataSources method.
// * redirect_uri is an optional parameter. If not specified, then the
// authorization code is posted to the opener of the authorization flow
// window. Otherwise it will be sent to the redirect URI. A special value of
// urn:ietf:wg:oauth:2.0:oob means that the authorization code should be
// returned in the title bar of the browser, with the page text prompting
// the user to copy the code and paste it in the application.
string authorization_code = 3;
// Required list of fields to be updated in this request.
google.protobuf.FieldMask update_mask = 4;
}
// A request to get data transfer information.
message GetTransferConfigRequest {
// The field will contain the name of the requested resource, for example:
// `projects/{project_id}/transferConfigs/{config_id}`
string name = 1;
}
// A request to delete data transfer information. All associated transfer runs
// and log messages will be deleted as well.
message DeleteTransferConfigRequest {
// The field will contain the name of the requested resource, for example:
// `projects/{project_id}/transferConfigs/{config_id}`
string name = 1;
}
// A request to get data transfer run information.
message GetTransferRunRequest {
// The field will contain the name of the requested resource, for example:
// `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`
string name = 1;
}
// A request to delete data transfer run information.
message DeleteTransferRunRequest {
// The field will contain the name of the requested resource, for example:
// `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`
string name = 1;
}
// A request to list data transfers configured for a BigQuery project.
message ListTransferConfigsRequest {
// The BigQuery project id for which transfer configs
// should be returned: `projects/{project_id}`.
string parent = 1;
// When specified, only configurations of requested data sources are returned.
repeated string data_source_ids = 2;
// Pagination token, which can be used to request a specific page
// of `ListTransfersRequest` list results. For multiple-page
// results, `ListTransfersResponse` outputs
// a `next_page_token`, which can be used as the
// `page_token` value to request the next page of list results.
string page_token = 3;
// Page size. Defaults to the maximum allowed value of 1000 results.
int32 page_size = 4;
}
// The returned list of pipelines in the project.
message ListTransferConfigsResponse {
// Output only. The stored pipeline transfer configurations.
repeated TransferConfig transfer_configs = 1;
// Output only. The next-page token. For multiple-page list results,
// this token can be used as the
// `ListTransferConfigsRequest.page_token`
// to request the next page of list results.
string next_page_token = 2;
}
// A request to list data transfer runs. UI can use this method to show/filter
// specific data transfer runs. The data source can use this method to request
// all scheduled transfer runs.
message ListTransferRunsRequest {
// Represents which runs should be pulled.
enum RunAttempt {
// All runs should be returned.
RUN_ATTEMPT_UNSPECIFIED = 0;
// Only latest run per day should be returned.
LATEST = 1;
}
// Name of transfer configuration for which transfer runs should be retrieved.
// Format of transfer configuration resource name is:
// `projects/{project_id}/transferConfigs/{config_id}`.
string parent = 1;
// When specified, only transfer runs with requested states are returned.
repeated TransferState states = 2;
// Pagination token, which can be used to request a specific page
// of `ListTransferRunsRequest` list results. For multiple-page
// results, `ListTransferRunsResponse` outputs
// a `next_page_token`, which can be used as the
// `page_token` value to request the next page of list results.
string page_token = 3;
// Page size. Defaults to the maximum allowed value of 1000 results.
int32 page_size = 4;
// Indicates how run attempts are to be pulled.
RunAttempt run_attempt = 5;
}
// The returned list of pipelines in the project.
message ListTransferRunsResponse {
// Output only. The stored pipeline transfer runs.
repeated TransferRun transfer_runs = 1;
// Output only. The next-page token. For multiple-page list results,
// this token can be used as the
// `ListTransferRunsRequest.page_token`
// to request the next page of list results.
string next_page_token = 2;
}
// A request to get user facing log messages associated with a data transfer run.
message ListTransferLogsRequest {
// Transfer run name in the form:
// `projects/{project_id}/transferConfigs/{config_id}/runs/{run_id}`.
string parent = 1;
// Pagination token, which can be used to request a specific page
// of `ListTransferLogsRequest` list results. For multiple-page
// results, `ListTransferLogsResponse` outputs
// a `next_page_token`, which can be used as the
// `page_token` value to request the next page of list results.
string page_token = 4;
// Page size. Defaults to the maximum allowed value of 1000 results.
int32 page_size = 5;
// Message types to return. If not populated, INFO, WARNING and ERROR
// messages are returned.
repeated TransferMessage.MessageSeverity message_types = 6;
}
// The returned list of transfer run messages.
message ListTransferLogsResponse {
// Output only. The stored pipeline transfer messages.
repeated TransferMessage transfer_messages = 1;
// Output only. The next-page token. For multiple-page list results,
// this token can be used as the
// `ListTransferLogsRequest.page_token`
// to request the next page of list results.
string next_page_token = 2;
}
// A request to determine whether the user has valid credentials. This method
// is used to limit the number of OAuth popups in the user interface. The
// user id is inferred from the API call context.
// If the data source has the Google+ authorization type, this method
// returns false, as it cannot be determined whether the credentials are
// already valid merely based on the user id.
message CheckValidCredsRequest {
// The data source in the form:
// `projects/{project_id}/dataSources/{data_source_id}`
string name = 1;
}
// A response indicating whether the credentials exist and are valid.
message CheckValidCredsResponse {
// If set to `true`, the credentials exist and are valid.
bool has_valid_creds = 1;
}
// A request to schedule transfer runs for a time range.
message ScheduleTransferRunsRequest {
// Transfer configuration name in the form:
// `projects/{project_id}/transferConfigs/{config_id}`.
string parent = 1;
// Start time of the range of transfer runs. For example,
// `"2017-05-25T00:00:00+00:00"`.
google.protobuf.Timestamp start_time = 2;
// End time of the range of transfer runs. For example,
// `"2017-05-30T00:00:00+00:00"`.
google.protobuf.Timestamp end_time = 3;
}
// A response to schedule transfer runs for a time range.
message ScheduleTransferRunsResponse {
// The transfer runs that were scheduled.
repeated TransferRun runs = 1;
}
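
The `google.api.http` annotations above define a REST surface alongside the
gRPC service. As a quick illustration of how those bindings compose with the
pagination contract documented in this file, here is a minimal Python sketch
that pages through ListDataSources and calls CheckValidCreds. It is a hedged
sketch, not an official client: the endpoint
(https://bigquerydatatransfer.googleapis.com), the bearer-token credential,
and the use of the third-party `requests` library are assumptions on top of
what the proto itself specifies.

import requests

# Assumed public endpoint; the proto only defines the request paths.
BASE = "https://bigquerydatatransfer.googleapis.com"
HEADERS = {"Authorization": "Bearer ya29.your-access-token"}  # hypothetical

def list_data_sources(project_id):
    """Page through GET /v1/{parent=projects/*}/dataSources.

    Follows the pagination contract above: pass next_page_token back as
    page_token until the server stops returning one.
    """
    parent = f"projects/{project_id}"
    page_token = None
    while True:
        params = {"pageSize": 100}
        if page_token:
            params["pageToken"] = page_token
        resp = requests.get(f"{BASE}/v1/{parent}/dataSources",
                            headers=HEADERS, params=params)
        resp.raise_for_status()
        body = resp.json()
        for source in body.get("dataSources", []):
            yield source
        page_token = body.get("nextPageToken")
        if not page_token:
            break

def check_valid_creds(project_id, data_source_id):
    """POST /v1/{name=projects/*/dataSources/*}:checkValidCreds."""
    name = f"projects/{project_id}/dataSources/{data_source_id}"
    resp = requests.post(f"{BASE}/v1/{name}:checkValidCreds",
                         headers=HEADERS, json={})
    resp.raise_for_status()
    return resp.json().get("hasValidCreds", False)

Note that snake_case proto fields appear in camelCase over JSON
(`nextPageToken`, `hasValidCreds`), per the standard proto3 JSON mapping.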


@@ -0,0 +1,222 @@
// Copyright 2018 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package google.cloud.bigquery.datatransfer.v1;
import "google/api/annotations.proto";
import "google/protobuf/struct.proto";
import "google/protobuf/timestamp.proto";
import "google/rpc/status.proto";
option csharp_namespace = "Google.Cloud.BigQuery.DataTransfer.V1";
option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1;datatransfer";
option java_multiple_files = true;
option java_outer_classname = "TransferProto";
option java_package = "com.google.cloud.bigquery.datatransfer.v1";
option objc_class_prefix = "GCBDT";
option php_namespace = "Google\\Cloud\\BigQuery\\DataTransfer\\V1";
// Represents a data transfer configuration. A transfer configuration
// contains all metadata needed to perform a data transfer. For example,
// `destination_dataset_id` specifies where data should be stored.
// When a new transfer configuration is created, the specified
// `destination_dataset_id` is created when needed and shared with the
// appropriate data source service account.
message TransferConfig {
// The resource name of the transfer config.
// Transfer config names have the form
// `projects/{project_id}/transferConfigs/{config_id}`,
// where `config_id` is usually a UUID, though this is neither guaranteed
// nor required. The name is ignored when creating a transfer config.
string name = 1;
// The BigQuery target dataset id.
string destination_dataset_id = 2;
// User specified display name for the data transfer.
string display_name = 3;
// Data source id. Cannot be changed once data transfer is created.
string data_source_id = 5;
// Data transfer specific parameters.
google.protobuf.Struct params = 9;
// Data transfer schedule.
// If the data source does not support a custom schedule, this should be
// empty. If it is empty, the default value for the data source will be
// used.
// The specified times are in UTC.
// Examples of valid schedules:
// `1st,3rd monday of month 15:30`,
// `every wed,fri of jan,jun 13:15`, and
// `first sunday of quarter 00:00`.
// See more explanation about the format here:
// https://cloud.google.com/appengine/docs/flexible/python/scheduling-jobs-with-cron-yaml#the_schedule_format
// NOTE: the minimum schedule granularity is 8 hours; runs cannot be
// scheduled more frequently than that.
string schedule = 7;
// The number of days to look back to automatically refresh the data.
// For example, if `data_refresh_window_days = 10`, then every day
// BigQuery reingests data for [today-10, today-1], rather than ingesting data
// for just [today-1].
// Only valid if the data source supports the feature. Set the value to 0
// to use the default value.
int32 data_refresh_window_days = 12;
// Whether this config is disabled. When set to true, no runs are scheduled
// for the transfer.
bool disabled = 13;
// Output only. Data transfer modification time. Ignored by server on input.
google.protobuf.Timestamp update_time = 4;
// Output only. Next time when data transfer will run.
google.protobuf.Timestamp next_run_time = 8;
// Output only. State of the most recently updated transfer run.
TransferState state = 10;
// Output only. Unique ID of the user on whose behalf transfer is done.
// Applicable only to data sources that do not support service accounts.
// When set to 0, the data source service account credentials are used.
// May be negative. Note that this identifier is not stable.
// It may change over time even for the same user.
int64 user_id = 11;
// Output only. Region in which the BigQuery dataset is located.
string dataset_region = 14;
}
// Represents a data transfer run.
message TransferRun {
// The resource name of the transfer run.
// Transfer run names have the form
// `projects/{project_id}/locations/{location}/transferConfigs/{config_id}/runs/{run_id}`.
// The name is ignored when creating a transfer run.
string name = 1;
// Minimum time after which a transfer run can be started.
google.protobuf.Timestamp schedule_time = 3;
// For batch transfer runs, specifies the date and time that
// data should be ingested.
google.protobuf.Timestamp run_time = 10;
// Status of the transfer run.
google.rpc.Status error_status = 21;
// Output only. Time when transfer run was started.
// Parameter ignored by server for input requests.
google.protobuf.Timestamp start_time = 4;
// Output only. Time when transfer run ended.
// Parameter ignored by server for input requests.
google.protobuf.Timestamp end_time = 5;
// Output only. Last time the data transfer run state was updated.
google.protobuf.Timestamp update_time = 6;
// Output only. Data transfer specific parameters.
google.protobuf.Struct params = 9;
// Output only. The BigQuery target dataset id.
string destination_dataset_id = 2;
// Output only. Data source id.
string data_source_id = 7;
// Data transfer run state. Ignored for input requests.
TransferState state = 8;
// Output only. Unique ID of the user on whose behalf transfer is done.
// Applicable only to data sources that do not support service accounts.
// When set to 0, the data source service account credentials are used.
// May be negative. Note that this identifier is not stable.
// It may change over time even for the same user.
int64 user_id = 11;
// Output only. Describes the schedule of this transfer run if it was
// created as part of a regular schedule. For batch transfer runs that are
// scheduled manually, this is empty.
// NOTE: the system might choose to delay the schedule depending on the
// current load, so `schedule_time` doesn't always match this.
string schedule = 12;
}
// Represents a user facing message for a particular data transfer run.
message TransferMessage {
// Represents data transfer user facing message severity.
enum MessageSeverity {
// No severity specified.
MESSAGE_SEVERITY_UNSPECIFIED = 0;
// Informational message.
INFO = 1;
// Warning message.
WARNING = 2;
// Error message.
ERROR = 3;
}
// Time when message was logged.
google.protobuf.Timestamp message_time = 1;
// Message severity.
MessageSeverity severity = 2;
// Message text.
string message_text = 3;
}
// DEPRECATED. Represents data transfer type.
enum TransferType {
// Invalid or Unknown transfer type placeholder.
TRANSFER_TYPE_UNSPECIFIED = 0;
// Batch data transfer.
BATCH = 1;
// Streaming data transfer. A streaming data source currently doesn't
// support multiple transfer configs per project.
STREAMING = 2;
}
// Represents data transfer run state.
enum TransferState {
// State placeholder.
TRANSFER_STATE_UNSPECIFIED = 0;
// Data transfer is scheduled and is waiting to be picked up by
// data transfer backend.
PENDING = 2;
// Data transfer is in progress.
RUNNING = 3;
// Data transfer completed successfully.
SUCCEEDED = 4;
// Data transfer failed.
FAILED = 5;
// Data transfer is cancelled.
CANCELLED = 6;
}
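
To make the TransferConfig message above concrete, the sketch below builds
its proto3 JSON form and POSTs it to the CreateTransferConfig binding from
the first file. This is a hedged sketch: the endpoint and token are assumed
as before, and the data source id, dataset, and `params` contents are
hypothetical placeholders rather than values defined in these protos.

import requests

BASE = "https://bigquerydatatransfer.googleapis.com"  # assumed endpoint
HEADERS = {"Authorization": "Bearer ya29.your-access-token"}  # hypothetical

def create_transfer_config(project_id):
    # proto3 JSON mapping of TransferConfig: snake_case fields become
    # camelCase, and the google.protobuf.Struct `params` becomes a plain
    # JSON object.
    transfer_config = {
        "destinationDatasetId": "my_dataset",   # hypothetical target dataset
        "displayName": "nightly load",
        "dataSourceId": "my_data_source",       # hypothetical data source id
        "schedule": "every 24 hours",           # must respect 8h granularity
        "dataRefreshWindowDays": 10,            # re-ingest [today-10, today-1]
        "params": {"query": "SELECT 1"},        # data-source-specific params
    }
    resp = requests.post(
        f"{BASE}/v1/projects/{project_id}/transferConfigs",
        headers=HEADERS, json=transfer_config)
    resp.raise_for_status()
    created = resp.json()
    # Output-only fields (name, state, nextRunTime, updateTime) are filled in
    # by the server; any name sent by the client is ignored, as noted above.
    return created["name"]

The omitted `name` reflects the comment on TransferConfig: the server assigns
`projects/{project_id}/transferConfigs/{config_id}` itself.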


@@ -0,0 +1,525 @@
// Copyright 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
syntax = "proto3";
package google.cloud.bigquery.logging.v1;
import "google/api/annotations.proto";
import "google/protobuf/duration.proto";
import "google/protobuf/timestamp.proto";
import "google/rpc/status.proto";
option go_package = "google.golang.org/genproto/googleapis/cloud/bigquery/logging/v1;logging";
option java_multiple_files = true;
option java_outer_classname = "AuditDataProto";
option java_package = "com.google.cloud.bigquery.logging.v1";
// BigQuery request and response messages for audit log.
// Note: `Table.schema` has been deprecated in favor of `Table.schemaJson`.
// `Table.schema` may continue to be present in your logs during this
// transition.
message AuditData {
// Request data for each BigQuery method.
oneof request {
// Table insert request.
TableInsertRequest table_insert_request = 1;
// Table update request.
TableUpdateRequest table_update_request = 16;
// Dataset list request.
DatasetListRequest dataset_list_request = 2;
// Dataset insert request.
DatasetInsertRequest dataset_insert_request = 3;
// Dataset update request.
DatasetUpdateRequest dataset_update_request = 4;
// Job insert request.
JobInsertRequest job_insert_request = 5;
// Job query request.
JobQueryRequest job_query_request = 6;
// Job get query results request.
JobGetQueryResultsRequest job_get_query_results_request = 7;
// Table data-list request.
TableDataListRequest table_data_list_request = 8;
}
// Response data for each BigQuery method.
oneof response {
// Table insert response.
TableInsertResponse table_insert_response = 9;
// Table update response.
TableUpdateResponse table_update_response = 10;
// Dataset insert response.
DatasetInsertResponse dataset_insert_response = 11;
// Dataset update response.
DatasetUpdateResponse dataset_update_response = 12;
// Job insert response.
JobInsertResponse job_insert_response = 18;
// Job query response.
JobQueryResponse job_query_response = 13;
// Job get query results response.
JobGetQueryResultsResponse job_get_query_results_response = 14;
// Deprecated: Job query-done response. Use this information for usage
// analysis.
JobQueryDoneResponse job_query_done_response = 15;
}
// A job completion event.
JobCompletedEvent job_completed_event = 17;
}
// Table insert request.
message TableInsertRequest {
// The new table.
Table resource = 1;
}
// Table update request.
message TableUpdateRequest {
// The table to be updated.
Table resource = 1;
}
// Table insert response.
message TableInsertResponse {
// Final state of the inserted table.
Table resource = 1;
}
// Table update response.
message TableUpdateResponse {
// Final state of the updated table.
Table resource = 1;
}
// Dataset list request.
message DatasetListRequest {
// Whether to list all datasets, including hidden ones.
bool list_all = 1;
}
// Dataset insert request.
message DatasetInsertRequest {
// The dataset to be inserted.
Dataset resource = 1;
}
// Dataset insert response.
message DatasetInsertResponse {
// Final state of the inserted dataset.
Dataset resource = 1;
}
// Dataset update request.
message DatasetUpdateRequest {
// The dataset to be updated.
Dataset resource = 1;
}
// Dataset update response.
message DatasetUpdateResponse {
// Final state of the updated dataset.
Dataset resource = 1;
}
// Job insert request.
message JobInsertRequest {
// Job insert request.
Job resource = 1;
}
// Job insert response.
message JobInsertResponse {
// Job insert response.
Job resource = 1;
}
// Job query request.
message JobQueryRequest {
// The query.
string query = 1;
// The maximum number of results.
uint32 max_results = 2;
// The default dataset for tables that do not have a dataset specified.
DatasetName default_dataset = 3;
// Project that the query should be charged to.
string project_id = 4;
// If true, don't actually run the job. Just check that it would run.
bool dry_run = 5;
}
// Job query response.
message JobQueryResponse {
// The total number of rows in the full query result set.
uint64 total_results = 1;
// Information about the queried job.
Job job = 2;
}
// Job getQueryResults request.
message JobGetQueryResultsRequest {
// Maximum number of results to return.
uint32 max_results = 1;
// Zero-based row number at which to start.
uint64 start_row = 2;
}
// Job getQueryResults response.
message JobGetQueryResultsResponse {
// Total number of results in query results.
uint64 total_results = 1;
// The job that was created to run the query.
// It completed if `job.status.state` is `DONE`.
// It failed if `job.status.errorResult` is also present.
Job job = 2;
}
// Job getQueryDone response.
message JobQueryDoneResponse {
// The job and status information.
// The job completed if `job.status.state` is `DONE`.
Job job = 1;
}
// Query job completed event.
message JobCompletedEvent {
// Name of the event.
string event_name = 1;
// Job information.
Job job = 2;
}
// Table data-list request.
message TableDataListRequest {
// Starting row offset.
uint64 start_row = 1;
// Maximum number of results to return.
uint32 max_results = 2;
}
// Describes a BigQuery table.
// See the [Table](/bigquery/docs/reference/v2/tables) API resource
// for more details on individual fields.
// Note: `Table.schema` has been deprecated in favor of `Table.schemaJson`.
// `Table.schema` may continue to be present in your logs during this
// transition.
message Table {
// The name of the table.
TableName table_name = 1;
// User-provided metadata for the table.
TableInfo info = 2;
// A JSON representation of the table's schema.
string schema_json = 8;
// If present, this is a virtual table defined by a SQL query.
TableViewDefinition view = 4;
// The expiration date for the table, after which the table
// is deleted and the storage reclaimed.
// If not present, the table persists indefinitely.
google.protobuf.Timestamp expire_time = 5;
// The time the table was created.
google.protobuf.Timestamp create_time = 6;
// The time the table was last truncated
// by an operation with a `writeDisposition` of `WRITE_TRUNCATE`.
google.protobuf.Timestamp truncate_time = 7;
}
// User-provided metadata for a table.
message TableInfo {
// A short name for the table, such as `"Analytics Data - Jan 2011"`.
string friendly_name = 1;
// A long description, perhaps several paragraphs,
// describing the table contents in detail.
string description = 2;
}
// Describes a virtual table defined by a SQL query.
message TableViewDefinition {
// SQL query defining the view.
string query = 1;
}
// BigQuery dataset information.
// See the [Dataset](/bigquery/docs/reference/v2/datasets) API resource
// for more details on individual fields.
message Dataset {
// The name of the dataset.
DatasetName dataset_name = 1;
// User-provided metadata for the dataset.
DatasetInfo info = 2;
// The time the dataset was created.
google.protobuf.Timestamp create_time = 4;
// The time the dataset was last modified.
google.protobuf.Timestamp update_time = 5;
// The access control list for the dataset.
BigQueryAcl acl = 6;
// If this field is present, each table that does not specify an
// expiration time is assigned an expiration time by adding this
// duration to the table's `createTime`. If this field is empty,
// there is no default table expiration time.
google.protobuf.Duration default_table_expire_duration = 8;
}
// User-provided metadata for a dataset.
message DatasetInfo {
// A short name for the dataset, such as `"Analytics Data 2011"`.
string friendly_name = 1;
// A long description, perhaps several paragraphs,
// describing the dataset contents in detail.
string description = 2;
}
// An access control list.
message BigQueryAcl {
// Access control entry.
message Entry {
// The granted role, which can be `READER`, `WRITER`, or `OWNER`.
string role = 1;
// Grants access to a group identified by an email address.
string group_email = 2;
// Grants access to a user identified by an email address.
string user_email = 3;
// Grants access to all members of a domain.
string domain = 4;
// Grants access to special groups. Valid groups are `PROJECT_OWNERS`,
// `PROJECT_READERS`, `PROJECT_WRITERS` and `ALL_AUTHENTICATED_USERS`.
string special_group = 5;
// Grants access to a BigQuery View.
TableName view_name = 6;
}
// Access control entry list.
repeated Entry entries = 1;
}
// Describes a job.
message Job {
// Job name.
JobName job_name = 1;
// Job configuration.
JobConfiguration job_configuration = 2;
// Job status.
JobStatus job_status = 3;
// Job statistics.
JobStatistics job_statistics = 4;
}
// Job configuration information.
// See the [Jobs](/bigquery/docs/reference/v2/jobs) API resource
// for more details on individual fields.
message JobConfiguration {
// Describes a query job, which executes a SQL-like query.
message Query {
// The SQL query to run.
string query = 1;
// The table where results are written.
TableName destination_table = 2;
// Describes when a job is allowed to create a table:
// `CREATE_IF_NEEDED`, `CREATE_NEVER`.
string create_disposition = 3;
// Describes how writes affect existing tables:
// `WRITE_TRUNCATE`, `WRITE_APPEND`, `WRITE_EMPTY`.
string write_disposition = 4;
// If a table name is specified without a dataset in a query,
// this dataset will be added to the table name.
DatasetName default_dataset = 5;
// Describes data sources outside BigQuery, if needed.
repeated TableDefinition table_definitions = 6;
}
// Describes a load job, which loads data from an external source via
// the import pipeline.
message Load {
// URIs for the data to be imported. Only Google Cloud Storage URIs are
// supported.
repeated string source_uris = 1;
// The table schema in JSON format, representing a TableSchema.
string schema_json = 6;
// The table where the imported data is written.
TableName destination_table = 3;
// Describes when a job is allowed to create a table:
// `CREATE_IF_NEEDED`, `CREATE_NEVER`.
string create_disposition = 4;
// Describes how writes affect existing tables:
// `WRITE_TRUNCATE`, `WRITE_APPEND`, `WRITE_EMPTY`.
string write_disposition = 5;
}
// Describes an extract job, which exports data to an external source
// via the export pipeline.
message Extract {
// Google Cloud Storage URIs where extracted data should be written.
repeated string destination_uris = 1;
// The source table.
TableName source_table = 2;
}
// Describes a copy job, which copies an existing table to another table.
message TableCopy {
// Source tables.
repeated TableName source_tables = 1;
// Destination table.
TableName destination_table = 2;
// Describes when a job is allowed to create a table:
// `CREATE_IF_NEEDED`, `CREATE_NEVER`.
string create_disposition = 3;
// Describes how writes affect existing tables:
// `WRITE_TRUNCATE`, `WRITE_APPEND`, `WRITE_EMPTY`.
string write_disposition = 4;
}
// Job configuration information.
oneof configuration {
// Query job information.
Query query = 5;
// Load job information.
Load load = 6;
// Extract job information.
Extract extract = 7;
// TableCopy job information.
TableCopy table_copy = 8;
}
// If true, don't actually run the job. Just check that it would run.
bool dry_run = 9;
}
// Describes an external data source used in a query.
message TableDefinition {
// Name of the table, used in queries.
string name = 1;
// Google Cloud Storage URIs for the data to be imported.
repeated string source_uris = 2;
}
// Running state of a job.
message JobStatus {
// State of a job: `PENDING`, `RUNNING`, or `DONE`.
string state = 1;
// If the job did not complete successfully, this field describes why.
google.rpc.Status error = 2;
}
// Job statistics that may change after a job starts.
message JobStatistics {
// Time when the job was created.
google.protobuf.Timestamp create_time = 1;
// Time when the job started.
google.protobuf.Timestamp start_time = 2;
// Time when the job ended.
google.protobuf.Timestamp end_time = 3;
// Total bytes processed for a job.
int64 total_processed_bytes = 4;
// Processed bytes, adjusted by the job's CPU usage.
int64 total_billed_bytes = 5;
// The tier assigned by CPU-based billing.
int32 billing_tier = 7;
}
// The fully-qualified name for a dataset.
message DatasetName {
// The project ID.
string project_id = 1;
// The dataset ID within the project.
string dataset_id = 2;
}
// The fully-qualified name for a table.
message TableName {
// The project ID.
string project_id = 1;
// The dataset ID within the project.
string dataset_id = 2;
// The table ID of the table within the dataset.
string table_id = 3;
}
// The fully-qualified name for a job.
message JobName {
// The project ID.
string project_id = 1;
// The job ID within the project.
string job_id = 2;
}