Commit e514139

Author: Bill Prin
Fix style on LoadDataCsvSample

1 parent 8fc1c6d commit e514139
1 file changed: 81 additions and 46 deletions
@@ -1,15 +1,15 @@
 /*
-Copyright 2015, Google, Inc.
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
+Copyright 2015, Google, Inc.
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
 limitations under the License.
 */
 package com.google.cloud.bigquery.samples;
@@ -29,89 +29,124 @@
 import java.util.Scanner;

 /**
- * TODO: Insert description here. (generated by elibixby)
+ * CLI tool to load data from a CSV into Bigquery.
  */
-public class LoadDataCSVSample extends BigqueryUtils {
+public class LoadDataCSVSample {
+
+  /**
+   * Protected constructor since this is a collection of static methods.
+   */
+  protected LoadDataCSVSample() {

-
+  }
+
+  /**
+   * CLI tool to load data from a CSV into Bigquery.
+   * @param args Command line args, should be empty
+   * @throws IOException IOException
+   * @throws InterruptedException InterruptedException
+   */
   // [START main]
-  public static void main(String[] args) throws IOException, InterruptedException{
+  public static void main(final String[] args) throws IOException,
+      InterruptedException {
     Scanner scanner = new Scanner(System.in);
     System.out.println("Enter your project id: ");
     String projectId = scanner.nextLine();
     System.out.println("Enter your dataset id: ");
     String datasetId = scanner.nextLine();
     System.out.println("Enter your table id: ");
     String tableId = scanner.nextLine();
-    System.out.println("Enter the Google Cloud Storage Path to the data you'd like to load: ");
+    System.out.println("Enter the Google Cloud Storage Path to the data "
+        + "you'd like to load: ");
     String cloudStoragePath = scanner.nextLine();
     System.out.println("Enter the filepath to your schema: ");
     String sourceSchemaPath = scanner.nextLine();
-
-
-    System.out.println("Enter how often to check if your job is complete (milliseconds): ");
+
+
+    System.out.println("Enter how often to check if your job is complete "
+        + "(milliseconds): ");
     long interval = scanner.nextLong();
     scanner.close();
-
+
     run(cloudStoragePath,
         projectId,
         datasetId,
         tableId,
         new FileReader(new File(sourceSchemaPath)),
        interval);
-
+
   }
   // [END main]
-
+
+  /**
+   * Run the bigquery CLI.
+   * @param cloudStoragePath Cloud Storage path to the CSV data to load
+   * @param projectId Project id
+   * @param datasetId Dataset id
+   * @param tableId Table id
+   * @param schemaSource Source of the schema
+   * @param interval Interval to wait between polling, in milliseconds
+   * @throws IOException Thrown if there is an error connecting to Bigquery.
+   * @throws InterruptedException Should never be thrown
+   */
   // [START run]
-  public static void run(
-      String cloudStoragePath,
-      String projectId,
-      String datasetId,
-      String tableId,
-      Reader schemaSource,
-      long interval) throws IOException, InterruptedException{
+  public static void run(
+      final String cloudStoragePath,
+      final String projectId,
+      final String datasetId,
+      final String tableId,
+      final Reader schemaSource,
+      final long interval) throws IOException, InterruptedException {

     Bigquery bigquery = BigqueryServiceFactory.getService();
-
-
+
+
     Job loadJob = loadJob(
         bigquery,
         cloudStoragePath,
         new TableReference()
             .setProjectId(projectId)
             .setDatasetId(datasetId)
             .setTableId(tableId),
-        loadSchema(schemaSource));
+        BigqueryUtils.loadSchema(schemaSource));

-    Bigquery.Jobs.Get get_job = bigquery.jobs().get(
-        loadJob.getJobReference().getProjectId(),
+    Bigquery.Jobs.Get getJob = bigquery.jobs().get(
+        loadJob.getJobReference().getProjectId(),
         loadJob.getJobReference().getJobId());
-
-    pollJob(get_job, interval);
-
+
+    BigqueryUtils.pollJob(getJob, interval);
+
     System.out.println("Load is Done!");
-
+
   }
   // [END run]
-
+
+  /**
+   * Creates a job that loads data from Cloud Storage into a table.
+   * @param bigquery Bigquery service to use
+   * @param cloudStoragePath Cloud Storage path to the data we are loading
+   * @param table Table to load into
+   * @param schema The schema of the table we are loading into
+   * @return The job that loads data into the table
+   * @throws IOException Thrown if there is an error connecting to Bigquery.
+   */
   // [START load_job]
   public static Job loadJob(
-      Bigquery bigquery,
-      String cloudStoragePath,
-      TableReference table,
-      TableSchema schema) throws IOException{
-
+      final Bigquery bigquery,
+      final String cloudStoragePath,
+      final TableReference table,
+      final TableSchema schema) throws IOException {
+
     JobConfigurationLoad load = new JobConfigurationLoad()
         .setDestinationTable(table)
         .setSchema(schema)
         .setSourceUris(Collections.singletonList(cloudStoragePath));

-    return bigquery.jobs().insert(table.getProjectId(),
+    return bigquery.jobs().insert(table.getProjectId(),
         new Job().setConfiguration(new JobConfiguration().setLoad(load)))
         .execute();
   }
   // [END load_job]
-
+

 }
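For context: after this refactor, LoadDataCSVSample is a plain collection of static methods, so run(...) can be driven directly instead of answering the interactive prompts in main. A minimal sketch of a non-interactive caller, assuming the project, dataset, table, Cloud Storage URI, and schema path below are hypothetical placeholders:

package com.google.cloud.bigquery.samples;

import java.io.FileReader;
import java.io.IOException;

/** Hypothetical driver that calls the refactored sample without prompts. */
public final class LoadDataCsvExample {

  private LoadDataCsvExample() { }

  public static void main(final String[] args)
      throws IOException, InterruptedException {
    // All values below are placeholders; substitute your own.
    LoadDataCSVSample.run(
        "gs://my-bucket/data.csv",          // cloudStoragePath
        "my-project-id",                    // projectId
        "my_dataset",                       // datasetId
        "my_table",                         // tableId
        new FileReader("my_schema.json"),   // schemaSource
        5000L);                             // check for completion every 5 seconds
  }
}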

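The diff also replaces the inherited pollJob(get_job, interval) call with the fully qualified BigqueryUtils.pollJob(getJob, interval), since the class no longer extends BigqueryUtils. A plausible sketch of what such a polling helper could look like, offered as an assumption for illustration rather than the repository's actual implementation: re-execute the jobs().get request until BigQuery reports the job state as DONE.

package com.google.cloud.bigquery.samples;

import com.google.api.services.bigquery.Bigquery;
import com.google.api.services.bigquery.model.Job;

import java.io.IOException;

/** Hypothetical stand-in for the repository's BigqueryUtils.pollJob helper. */
public final class PollJobSketch {

  private PollJobSketch() { }

  /**
   * Polls the given job request until the job completes.
   * @param request a prepared bigquery.jobs().get(...) request for the job
   * @param interval milliseconds to sleep between polls
   */
  public static void pollJob(final Bigquery.Jobs.Get request, final long interval)
      throws IOException, InterruptedException {
    Job job = request.execute();
    while (!"DONE".equals(job.getStatus().getState())) {
      System.out.println("Job is " + job.getStatus().getState()
          + ", waiting " + interval + " milliseconds...");
      Thread.sleep(interval);
      job = request.execute();
    }
  }
}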