http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/28a3eb60/_modules/airflow/contrib/hooks/bigquery_hook.html
----------------------------------------------------------------------
diff --git a/_modules/airflow/contrib/hooks/bigquery_hook.html 
b/_modules/airflow/contrib/hooks/bigquery_hook.html
new file mode 100644
index 0000000..1926c79
--- /dev/null
+++ b/_modules/airflow/contrib/hooks/bigquery_hook.html
@@ -0,0 +1,1279 @@
+
+
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+  <meta charset="utf-8">
+  
+  <meta name="viewport" content="width=device-width, initial-scale=1.0">
+  
+  <title>airflow.contrib.hooks.bigquery_hook &mdash; Airflow 
Documentation</title>
+  
+
+  
+  
+  
+  
+
+  
+
+  
+  
+    
+
+  
+
+  
+  
+    <link rel="stylesheet" href="../../../../_static/css/theme.css" 
type="text/css" />
+  
+
+  
+
+  
+        <link rel="index" title="Index"
+              href="../../../../genindex.html"/>
+        <link rel="search" title="Search" href="../../../../search.html"/>
+    <link rel="top" title="Airflow Documentation" 
href="../../../../index.html"/>
+        <link rel="up" title="Module code" href="../../../index.html"/> 
+
+  
+  <script src="../../../../_static/js/modernizr.min.js"></script>
+
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+   
+  <div class="wy-grid-for-nav">
+
+    
+    <nav data-toggle="wy-nav-shift" class="wy-nav-side">
+      <div class="wy-side-scroll">
+        <div class="wy-side-nav-search">
+          
+
+          
+            <a href="../../../../index.html" class="icon icon-home"> Airflow
+          
+
+          
+          </a>
+
+          
+            
+            
+          
+
+          
+<div role="search">
+  <form id="rtd-search-form" class="wy-form" action="../../../../search.html" 
method="get">
+    <input type="text" name="q" placeholder="Search docs" />
+    <input type="hidden" name="check_keywords" value="yes" />
+    <input type="hidden" name="area" value="default" />
+  </form>
+</div>
+
+          
+        </div>
+
+        <div class="wy-menu wy-menu-vertical" data-spy="affix" 
role="navigation" aria-label="main navigation">
+          
+            
+            
+              
+            
+            
+              <ul>
+<li class="toctree-l1"><a class="reference internal" 
href="../../../../project.html">Project</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../../../../license.html">License</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../../../../start.html">Quick Start</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../../../../installation.html">Installation</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../../../../tutorial.html">Tutorial</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../../../../configuration.html">Configuration</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../../../../ui.html">UI / Screenshots</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../../../../concepts.html">Concepts</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../../../../profiling.html">Data Profiling</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../../../../cli.html">Command Line Interface</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../../../../scheduler.html">Scheduling &amp; Triggers</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../../../../plugins.html">Plugins</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../../../../security.html">Security</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../../../../api.html">Experimental Rest API</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../../../../integration.html">Integration</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../../../../faq.html">FAQ</a></li>
+<li class="toctree-l1"><a class="reference internal" 
href="../../../../code.html">API Reference</a></li>
+</ul>
+
+            
+          
+        </div>
+      </div>
+    </nav>
+
+    <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+      
+      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+        
+          <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+          <a href="../../../../index.html">Airflow</a>
+        
+      </nav>
+
+
+      
+      <div class="wy-nav-content">
+        <div class="rst-content">
+          
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+<div role="navigation" aria-label="breadcrumbs navigation">
+
+  <ul class="wy-breadcrumbs">
+    
+      <li><a href="../../../../index.html">Docs</a> &raquo;</li>
+        
+          <li><a href="../../../index.html">Module code</a> &raquo;</li>
+        
+      <li>airflow.contrib.hooks.bigquery_hook</li>
+    
+    
+      <li class="wy-breadcrumbs-aside">
+        
+            
+        
+      </li>
+    
+  </ul>
+
+  
+  <hr/>
+</div>
+          <div role="main" class="document" itemscope="itemscope" 
itemtype="http://schema.org/Article";>
+           <div itemprop="articleBody">
+            
+  <h1>Source code for airflow.contrib.hooks.bigquery_hook</h1><div 
class="highlight"><pre>
+<span></span><span class="c1"># -*- coding: utf-8 -*-</span>
+<span class="c1">#</span>
+<span class="c1"># Licensed under the Apache License, Version 2.0 (the 
&quot;License&quot;);</span>
+<span class="c1"># you may not use this file except in compliance with the 
License.</span>
+<span class="c1"># You may obtain a copy of the License at</span>
+<span class="c1">#</span>
+<span class="c1"># http://www.apache.org/licenses/LICENSE-2.0</span>
+<span class="c1">#</span>
+<span class="c1"># Unless required by applicable law or agreed to in writing, 
software</span>
+<span class="c1"># distributed under the License is distributed on an &quot;AS 
IS&quot; BASIS,</span>
+<span class="c1"># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either 
express or implied.</span>
+<span class="c1"># See the License for the specific language governing 
permissions and</span>
+<span class="c1"># limitations under the License.</span>
+<span class="c1">#</span>
+
+<span class="sd">&quot;&quot;&quot;</span>
+<span class="sd">This module contains a BigQuery Hook, as well as a very basic 
PEP 249</span>
+<span class="sd">implementation for BigQuery.</span>
+<span class="sd">&quot;&quot;&quot;</span>
+
+<span class="kn">import</span> <span class="nn">time</span>
+
+<span class="kn">from</span> <span class="nn">apiclient.discovery</span> <span 
class="k">import</span> <span class="n">build</span><span class="p">,</span> 
<span class="n">HttpError</span>
+<span class="kn">from</span> <span class="nn">googleapiclient</span> <span 
class="k">import</span> <span class="n">errors</span>
+<span class="kn">from</span> <span class="nn">builtins</span> <span 
class="k">import</span> <span class="nb">range</span>
+<span class="kn">from</span> <span class="nn">pandas_gbq.gbq</span> <span 
class="k">import</span> <span class="n">GbqConnector</span><span 
class="p">,</span> \
+    <span class="n">_parse_data</span> <span class="k">as</span> <span 
class="n">gbq_parse_data</span><span class="p">,</span> \
+    <span class="n">_check_google_client_version</span> <span 
class="k">as</span> <span class="n">gbq_check_google_client_version</span><span 
class="p">,</span> \
+    <span class="n">_test_google_api_imports</span> <span class="k">as</span> 
<span class="n">gbq_test_google_api_imports</span>
+<span class="kn">from</span> <span class="nn">pandas.tools.merge</span> <span 
class="k">import</span> <span class="n">concat</span>
+<span class="kn">from</span> <span class="nn">past.builtins</span> <span 
class="k">import</span> <span class="n">basestring</span>
+
+<span class="kn">from</span> <span 
class="nn">airflow.contrib.hooks.gcp_api_base_hook</span> <span 
class="k">import</span> <span class="n">GoogleCloudBaseHook</span>
+<span class="kn">from</span> <span class="nn">airflow.hooks.dbapi_hook</span> 
<span class="k">import</span> <span class="n">DbApiHook</span>
+<span class="kn">from</span> <span 
class="nn">airflow.utils.log.logging_mixin</span> <span class="k">import</span> 
<span class="n">LoggingMixin</span>
+
+
+<div class="viewcode-block" id="BigQueryHook"><a class="viewcode-back" 
href="../../../../integration.html#airflow.contrib.hooks.bigquery_hook.BigQueryHook">[docs]</a><span
 class="k">class</span> <span class="nc">BigQueryHook</span><span 
class="p">(</span><span class="n">GoogleCloudBaseHook</span><span 
class="p">,</span> <span class="n">DbApiHook</span><span class="p">,</span> 
<span class="n">LoggingMixin</span><span class="p">):</span>
+    <span class="sd">&quot;&quot;&quot;</span>
+<span class="sd">    Interact with BigQuery. This hook uses the Google Cloud 
Platform</span>
+<span class="sd">    connection.</span>
+<span class="sd">    &quot;&quot;&quot;</span>
+    <span class="n">conn_name_attr</span> <span class="o">=</span> <span 
class="s1">&#39;bigquery_conn_id&#39;</span>
+
+    <span class="k">def</span> <span class="nf">__init__</span><span 
class="p">(</span><span class="bp">self</span><span class="p">,</span>
+                 <span class="n">bigquery_conn_id</span><span 
class="o">=</span><span class="s1">&#39;bigquery_default&#39;</span><span 
class="p">,</span>
+                 <span class="n">delegate_to</span><span 
class="o">=</span><span class="kc">None</span><span class="p">):</span>
+        <span class="nb">super</span><span class="p">(</span><span 
class="n">BigQueryHook</span><span class="p">,</span> <span 
class="bp">self</span><span class="p">)</span><span class="o">.</span><span 
class="fm">__init__</span><span class="p">(</span>
+            <span class="n">conn_id</span><span class="o">=</span><span 
class="n">bigquery_conn_id</span><span class="p">,</span>
+            <span class="n">delegate_to</span><span class="o">=</span><span 
class="n">delegate_to</span><span class="p">)</span>
+
+<div class="viewcode-block" id="BigQueryHook.get_conn"><a 
class="viewcode-back" 
href="../../../../integration.html#airflow.contrib.hooks.bigquery_hook.BigQueryHook.get_conn">[docs]</a>
    <span class="k">def</span> <span class="nf">get_conn</span><span 
class="p">(</span><span class="bp">self</span><span class="p">):</span>
+        <span class="sd">&quot;&quot;&quot;</span>
+<span class="sd">        Returns a BigQuery PEP 249 connection object.</span>
+<span class="sd">        &quot;&quot;&quot;</span>
+        <span class="n">service</span> <span class="o">=</span> <span 
class="bp">self</span><span class="o">.</span><span 
class="n">get_service</span><span class="p">()</span>
+        <span class="n">project</span> <span class="o">=</span> <span 
class="bp">self</span><span class="o">.</span><span 
class="n">_get_field</span><span class="p">(</span><span 
class="s1">&#39;project&#39;</span><span class="p">)</span>
+        <span class="k">return</span> <span 
class="n">BigQueryConnection</span><span class="p">(</span><span 
class="n">service</span><span class="o">=</span><span 
class="n">service</span><span class="p">,</span> <span 
class="n">project_id</span><span class="o">=</span><span 
class="n">project</span><span class="p">)</span></div>
+
+<div class="viewcode-block" id="BigQueryHook.get_service"><a 
class="viewcode-back" 
href="../../../../integration.html#airflow.contrib.hooks.bigquery_hook.BigQueryHook.get_service">[docs]</a>
    <span class="k">def</span> <span class="nf">get_service</span><span 
class="p">(</span><span class="bp">self</span><span class="p">):</span>
+        <span class="sd">&quot;&quot;&quot;</span>
+<span class="sd">        Returns a BigQuery service object.</span>
+<span class="sd">        &quot;&quot;&quot;</span>
+        <span class="n">http_authorized</span> <span class="o">=</span> <span 
class="bp">self</span><span class="o">.</span><span 
class="n">_authorize</span><span class="p">()</span>
+        <span class="k">return</span> <span class="n">build</span><span 
class="p">(</span><span class="s1">&#39;bigquery&#39;</span><span 
class="p">,</span> <span class="s1">&#39;v2&#39;</span><span class="p">,</span> 
<span class="n">http</span><span class="o">=</span><span 
class="n">http_authorized</span><span class="p">)</span></div>
+
+<div class="viewcode-block" id="BigQueryHook.insert_rows"><a 
class="viewcode-back" 
href="../../../../integration.html#airflow.contrib.hooks.bigquery_hook.BigQueryHook.insert_rows">[docs]</a>
    <span class="k">def</span> <span class="nf">insert_rows</span><span 
class="p">(</span><span class="bp">self</span><span class="p">,</span> <span 
class="n">table</span><span class="p">,</span> <span class="n">rows</span><span 
class="p">,</span> <span class="n">target_fields</span><span 
class="o">=</span><span class="kc">None</span><span class="p">,</span> <span 
class="n">commit_every</span><span class="o">=</span><span 
class="mi">1000</span><span class="p">):</span>
+        <span class="sd">&quot;&quot;&quot;</span>
+<span class="sd">        Insertion is currently unsupported. Theoretically, 
you could use</span>
+<span class="sd">        BigQuery&#39;s streaming API to insert rows into a 
table, but this hasn&#39;t</span>
+<span class="sd">        been implemented.</span>
+<span class="sd">        &quot;&quot;&quot;</span>
+        <span class="k">raise</span> <span 
class="ne">NotImplementedError</span><span class="p">()</span></div>
+
+<div class="viewcode-block" id="BigQueryHook.get_pandas_df"><a 
class="viewcode-back" 
href="../../../../integration.html#airflow.contrib.hooks.bigquery_hook.BigQueryHook.get_pandas_df">[docs]</a>
    <span class="k">def</span> <span class="nf">get_pandas_df</span><span 
class="p">(</span><span class="bp">self</span><span class="p">,</span> <span 
class="n">bql</span><span class="p">,</span> <span 
class="n">parameters</span><span class="o">=</span><span 
class="kc">None</span><span class="p">,</span> <span 
class="n">dialect</span><span class="o">=</span><span 
class="s1">&#39;legacy&#39;</span><span class="p">):</span>
+        <span class="sd">&quot;&quot;&quot;</span>
+<span class="sd">        Returns a Pandas DataFrame for the results produced 
by a BigQuery</span>
+<span class="sd">        query. The DbApiHook method must be overridden 
because Pandas</span>
+<span class="sd">        doesn&#39;t support PEP 249 connections, except for 
SQLite. See:</span>
+
+<span class="sd">        
https://github.com/pydata/pandas/blob/master/pandas/io/sql.py#L447</span>
+<span class="sd">        https://github.com/pydata/pandas/issues/6900</span>
+
+<span class="sd">        :param bql: The BigQuery SQL to execute.</span>
+<span class="sd">        :type bql: string</span>
+<span class="sd">        :param parameters: The parameters to render the SQL 
query with (not used, leave to override superclass method)</span>
+<span class="sd">        :type parameters: mapping or iterable</span>
+<span class="sd">        :param dialect: Dialect of BigQuery SQL – legacy 
SQL or standard SQL</span>
+<span class="sd">        :type dialect: string in {&#39;legacy&#39;, 
&#39;standard&#39;}, default &#39;legacy&#39;</span>
+<span class="sd">        &quot;&quot;&quot;</span>
+        <span class="n">service</span> <span class="o">=</span> <span 
class="bp">self</span><span class="o">.</span><span 
class="n">get_service</span><span class="p">()</span>
+        <span class="n">project</span> <span class="o">=</span> <span 
class="bp">self</span><span class="o">.</span><span 
class="n">_get_field</span><span class="p">(</span><span 
class="s1">&#39;project&#39;</span><span class="p">)</span>
+        <span class="n">connector</span> <span class="o">=</span> <span 
class="n">BigQueryPandasConnector</span><span class="p">(</span><span 
class="n">project</span><span class="p">,</span> <span 
class="n">service</span><span class="p">,</span> <span 
class="n">dialect</span><span class="o">=</span><span 
class="n">dialect</span><span class="p">)</span>
+        <span class="n">schema</span><span class="p">,</span> <span 
class="n">pages</span> <span class="o">=</span> <span 
class="n">connector</span><span class="o">.</span><span 
class="n">run_query</span><span class="p">(</span><span 
class="n">bql</span><span class="p">)</span>
+        <span class="n">dataframe_list</span> <span class="o">=</span> <span 
class="p">[]</span>
+
+        <span class="k">while</span> <span class="nb">len</span><span 
class="p">(</span><span class="n">pages</span><span class="p">)</span> <span 
class="o">&gt;</span> <span class="mi">0</span><span class="p">:</span>
+            <span class="n">page</span> <span class="o">=</span> <span 
class="n">pages</span><span class="o">.</span><span class="n">pop</span><span 
class="p">()</span>
+            <span class="n">dataframe_list</span><span class="o">.</span><span 
class="n">append</span><span class="p">(</span><span 
class="n">gbq_parse_data</span><span class="p">(</span><span 
class="n">schema</span><span class="p">,</span> <span 
class="n">page</span><span class="p">))</span>
+
+        <span class="k">if</span> <span class="nb">len</span><span 
class="p">(</span><span class="n">dataframe_list</span><span class="p">)</span> 
<span class="o">&gt;</span> <span class="mi">0</span><span class="p">:</span>
+            <span class="k">return</span> <span class="n">concat</span><span 
class="p">(</span><span class="n">dataframe_list</span><span class="p">,</span> 
<span class="n">ignore_index</span><span class="o">=</span><span 
class="kc">True</span><span class="p">)</span>
+        <span class="k">else</span><span class="p">:</span>
+            <span class="k">return</span> <span 
class="n">gbq_parse_data</span><span class="p">(</span><span 
class="n">schema</span><span class="p">,</span> <span class="p">[])</span></div>
+
+<div class="viewcode-block" id="BigQueryHook.table_exists"><a 
class="viewcode-back" 
href="../../../../integration.html#airflow.contrib.hooks.bigquery_hook.BigQueryHook.table_exists">[docs]</a>
    <span class="k">def</span> <span class="nf">table_exists</span><span 
class="p">(</span><span class="bp">self</span><span class="p">,</span> <span 
class="n">project_id</span><span class="p">,</span> <span 
class="n">dataset_id</span><span class="p">,</span> <span 
class="n">table_id</span><span class="p">):</span>
+        <span class="sd">&quot;&quot;&quot;</span>
+<span class="sd">        Checks for the existence of a table in Google 
BigQuery.</span>
+
+<span class="sd">        :param project_id: The Google cloud project in which 
to look for the table. The connection supplied to the hook</span>
+<span class="sd">        must provide access to the specified project.</span>
+<span class="sd">        :type project_id: string</span>
+<span class="sd">        :param dataset_id: The name of the dataset in which 
to look for the table.</span>
+<span class="sd">            storage bucket.</span>
+<span class="sd">        :type dataset_id: string</span>
+<span class="sd">        :param table_id: The name of the table to check the 
existence of.</span>
+<span class="sd">        :type table_id: string</span>
+<span class="sd">        &quot;&quot;&quot;</span>
+        <span class="n">service</span> <span class="o">=</span> <span 
class="bp">self</span><span class="o">.</span><span 
class="n">get_service</span><span class="p">()</span>
+        <span class="k">try</span><span class="p">:</span>
+            <span class="n">service</span><span class="o">.</span><span 
class="n">tables</span><span class="p">()</span><span class="o">.</span><span 
class="n">get</span><span class="p">(</span>
+                <span class="n">projectId</span><span class="o">=</span><span 
class="n">project_id</span><span class="p">,</span>
+                <span class="n">datasetId</span><span class="o">=</span><span 
class="n">dataset_id</span><span class="p">,</span>
+                <span class="n">tableId</span><span class="o">=</span><span 
class="n">table_id</span>
+            <span class="p">)</span><span class="o">.</span><span 
class="n">execute</span><span class="p">()</span>
+            <span class="k">return</span> <span class="kc">True</span>
+        <span class="k">except</span> <span class="n">errors</span><span 
class="o">.</span><span class="n">HttpError</span> <span class="k">as</span> 
<span class="n">e</span><span class="p">:</span>
+            <span class="k">if</span> <span class="n">e</span><span 
class="o">.</span><span class="n">resp</span><span class="p">[</span><span 
class="s1">&#39;status&#39;</span><span class="p">]</span> <span 
class="o">==</span> <span class="s1">&#39;404&#39;</span><span 
class="p">:</span>
+                <span class="k">return</span> <span class="kc">False</span>
+            <span class="k">raise</span></div></div>
+
+
+<span class="k">class</span> <span 
class="nc">BigQueryPandasConnector</span><span class="p">(</span><span 
class="n">GbqConnector</span><span class="p">):</span>
+    <span class="sd">&quot;&quot;&quot;</span>
+<span class="sd">    This connector behaves identically to GbqConnector (from 
Pandas), except</span>
+<span class="sd">    that it allows the service to be injected, and disables a 
call to</span>
+<span class="sd">    self.get_credentials(). This allows Airflow to use 
BigQuery with Pandas</span>
+<span class="sd">    without forcing a three legged OAuth connection. Instead, 
we can inject</span>
+<span class="sd">    service account credentials into the binding.</span>
+<span class="sd">    &quot;&quot;&quot;</span>
+    <span class="k">def</span> <span class="nf">__init__</span><span 
class="p">(</span><span class="bp">self</span><span class="p">,</span> <span 
class="n">project_id</span><span class="p">,</span> <span 
class="n">service</span><span class="p">,</span> <span 
class="n">reauth</span><span class="o">=</span><span 
class="kc">False</span><span class="p">,</span> <span 
class="n">verbose</span><span class="o">=</span><span 
class="kc">False</span><span class="p">,</span> <span 
class="n">dialect</span><span class="o">=</span><span 
class="s1">&#39;legacy&#39;</span><span class="p">):</span>
+        <span class="n">gbq_check_google_client_version</span><span 
class="p">()</span>
+        <span class="n">gbq_test_google_api_imports</span><span 
class="p">()</span>
+        <span class="bp">self</span><span class="o">.</span><span 
class="n">project_id</span> <span class="o">=</span> <span 
class="n">project_id</span>
+        <span class="bp">self</span><span class="o">.</span><span 
class="n">reauth</span> <span class="o">=</span> <span class="n">reauth</span>
+        <span class="bp">self</span><span class="o">.</span><span 
class="n">service</span> <span class="o">=</span> <span class="n">service</span>
+        <span class="bp">self</span><span class="o">.</span><span 
class="n">verbose</span> <span class="o">=</span> <span class="n">verbose</span>
+        <span class="bp">self</span><span class="o">.</span><span 
class="n">dialect</span> <span class="o">=</span> <span class="n">dialect</span>
+
+
+<span class="k">class</span> <span class="nc">BigQueryConnection</span><span 
class="p">(</span><span class="nb">object</span><span class="p">):</span>
+    <span class="sd">&quot;&quot;&quot;</span>
+<span class="sd">    BigQuery does not have a notion of a persistent 
connection. Thus, these</span>
+<span class="sd">    objects are small stateless factories for cursors, which 
do all the real</span>
+<span class="sd">    work.</span>
+<span class="sd">    &quot;&quot;&quot;</span>
+
+    <span class="k">def</span> <span class="nf">__init__</span><span 
class="p">(</span><span class="bp">self</span><span class="p">,</span> <span 
class="o">*</span><span class="n">args</span><span class="p">,</span> <span 
class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
+        <span class="bp">self</span><span class="o">.</span><span 
class="n">_args</span> <span class="o">=</span> <span class="n">args</span>
+        <span class="bp">self</span><span class="o">.</span><span 
class="n">_kwargs</span> <span class="o">=</span> <span class="n">kwargs</span>
+
+    <span class="k">def</span> <span class="nf">close</span><span 
class="p">(</span><span class="bp">self</span><span class="p">):</span>
+        <span class="sd">&quot;&quot;&quot; BigQueryConnection does not have 
anything to close. &quot;&quot;&quot;</span>
+        <span class="k">pass</span>
+
+    <span class="k">def</span> <span class="nf">commit</span><span 
class="p">(</span><span class="bp">self</span><span class="p">):</span>
+        <span class="sd">&quot;&quot;&quot; BigQueryConnection does not 
support transactions. &quot;&quot;&quot;</span>
+        <span class="k">pass</span>
+
+    <span class="k">def</span> <span class="nf">cursor</span><span 
class="p">(</span><span class="bp">self</span><span class="p">):</span>
+        <span class="sd">&quot;&quot;&quot; Return a new :py:class:`Cursor` 
object using the connection. &quot;&quot;&quot;</span>
+        <span class="k">return</span> <span 
class="n">BigQueryCursor</span><span class="p">(</span><span 
class="o">*</span><span class="bp">self</span><span class="o">.</span><span 
class="n">_args</span><span class="p">,</span> <span class="o">**</span><span 
class="bp">self</span><span class="o">.</span><span 
class="n">_kwargs</span><span class="p">)</span>
+
+    <span class="k">def</span> <span class="nf">rollback</span><span 
class="p">(</span><span class="bp">self</span><span class="p">):</span>
+        <span class="k">raise</span> <span 
class="ne">NotImplementedError</span><span class="p">(</span>
+            <span class="s2">&quot;BigQueryConnection does not have 
transactions&quot;</span><span class="p">)</span>
+
+
+<span class="k">class</span> <span class="nc">BigQueryBaseCursor</span><span 
class="p">(</span><span class="n">LoggingMixin</span><span class="p">):</span>
+    <span class="sd">&quot;&quot;&quot;</span>
+<span class="sd">    The BigQuery base cursor contains helper methods to 
execute queries against</span>
+<span class="sd">    BigQuery. The methods can be used directly by operators, 
in cases where a</span>
+<span class="sd">    PEP 249 cursor isn&#39;t needed.</span>
+<span class="sd">    &quot;&quot;&quot;</span>
+    <span class="k">def</span> <span class="nf">__init__</span><span 
class="p">(</span><span class="bp">self</span><span class="p">,</span> <span 
class="n">service</span><span class="p">,</span> <span 
class="n">project_id</span><span class="p">):</span>
+        <span class="bp">self</span><span class="o">.</span><span 
class="n">service</span> <span class="o">=</span> <span class="n">service</span>
+        <span class="bp">self</span><span class="o">.</span><span 
class="n">project_id</span> <span class="o">=</span> <span 
class="n">project_id</span>
+
+    <span class="k">def</span> <span class="nf">run_query</span><span 
class="p">(</span>
+            <span class="bp">self</span><span class="p">,</span> <span 
class="n">bql</span><span class="p">,</span> <span 
class="n">destination_dataset_table</span> <span class="o">=</span> <span 
class="kc">False</span><span class="p">,</span>
+            <span class="n">write_disposition</span> <span class="o">=</span> 
<span class="s1">&#39;WRITE_EMPTY&#39;</span><span class="p">,</span>
+            <span class="n">allow_large_results</span><span 
class="o">=</span><span class="kc">False</span><span class="p">,</span>
+            <span class="n">udf_config</span> <span class="o">=</span> <span 
class="kc">False</span><span class="p">,</span>
+            <span class="n">use_legacy_sql</span><span class="o">=</span><span 
class="kc">True</span><span class="p">,</span>
+            <span class="n">maximum_billing_tier</span><span 
class="o">=</span><span class="kc">None</span><span class="p">,</span>
+            <span class="n">create_disposition</span><span 
class="o">=</span><span class="s1">&#39;CREATE_IF_NEEDED&#39;</span><span 
class="p">,</span>
+            <span class="n">query_params</span><span class="o">=</span><span 
class="kc">None</span><span class="p">):</span>
+        <span class="sd">&quot;&quot;&quot;</span>
+<span class="sd">        Executes a BigQuery SQL query. Optionally persists 
results in a BigQuery</span>
+<span class="sd">        table. See here:</span>
+
+<span class="sd">        
https://cloud.google.com/bigquery/docs/reference/v2/jobs</span>
+
+<span class="sd">        For more details about these parameters.</span>
+
+<span class="sd">        :param bql: The BigQuery SQL to execute.</span>
+<span class="sd">        :type bql: string</span>
+<span class="sd">        :param destination_dataset_table: The dotted 
&lt;dataset&gt;.&lt;table&gt;</span>
+<span class="sd">            BigQuery table to save the query results.</span>
+<span class="sd">        :param write_disposition: What to do if the table 
already exists in</span>
+<span class="sd">            BigQuery.</span>
+<span class="sd">        :type write_disposition: string</span>
+<span class="sd">        :param create_disposition: Specifies whether the job 
is allowed to create new tables.</span>
+<span class="sd">        :type create_disposition: string</span>
+<span class="sd">        :param allow_large_results: Whether to allow large 
results.</span>
+<span class="sd">        :type allow_large_results: boolean</span>
+<span class="sd">        :param udf_config: The User Defined Function 
configuration for the query.</span>
+<span class="sd">            See 
https://cloud.google.com/bigquery/user-defined-functions for details.</span>
+<span class="sd">        :type udf_config: list</span>
+<span class="sd">        :param use_legacy_sql: Whether to use legacy SQL 
(true) or standard SQL (false).</span>
+<span class="sd">        :type use_legacy_sql: boolean</span>
+<span class="sd">        :param maximum_billing_tier: Positive integer that 
serves as a multiplier of the basic price.</span>
+<span class="sd">        :type maximum_billing_tier: integer</span>
+<span class="sd">        &quot;&quot;&quot;</span>
+        <span class="n">configuration</span> <span class="o">=</span> <span 
class="p">{</span>
+            <span class="s1">&#39;query&#39;</span><span class="p">:</span> 
<span class="p">{</span>
+                <span class="s1">&#39;query&#39;</span><span 
class="p">:</span> <span class="n">bql</span><span class="p">,</span>
+                <span class="s1">&#39;useLegacySql&#39;</span><span 
class="p">:</span> <span class="n">use_legacy_sql</span><span class="p">,</span>
+                <span class="s1">&#39;maximumBillingTier&#39;</span><span 
class="p">:</span> <span class="n">maximum_billing_tier</span>
+            <span class="p">}</span>
+        <span class="p">}</span>
+
+        <span class="k">if</span> <span 
class="n">destination_dataset_table</span><span class="p">:</span>
+            <span class="k">assert</span> <span class="s1">&#39;.&#39;</span> 
<span class="ow">in</span> <span 
class="n">destination_dataset_table</span><span class="p">,</span> <span 
class="p">(</span>
+                <span class="s1">&#39;Expected destination_dataset_table in 
the format of &#39;</span>
+                <span class="s1">&#39;&lt;dataset&gt;.&lt;table&gt;. Got: 
</span><span class="si">{}</span><span class="s1">&#39;</span><span 
class="p">)</span><span class="o">.</span><span class="n">format</span><span 
class="p">(</span><span class="n">destination_dataset_table</span><span 
class="p">)</span>
+            <span class="n">destination_project</span><span class="p">,</span> 
<span class="n">destination_dataset</span><span class="p">,</span> <span 
class="n">destination_table</span> <span class="o">=</span> \
+                <span class="n">_split_tablename</span><span 
class="p">(</span><span class="n">table_input</span><span 
class="o">=</span><span class="n">destination_dataset_table</span><span 
class="p">,</span>
+                                 <span 
class="n">default_project_id</span><span class="o">=</span><span 
class="bp">self</span><span class="o">.</span><span 
class="n">project_id</span><span class="p">)</span>
+            <span class="n">configuration</span><span class="p">[</span><span 
class="s1">&#39;query&#39;</span><span class="p">]</span><span 
class="o">.</span><span class="n">update</span><span class="p">({</span>
+                <span class="s1">&#39;allowLargeResults&#39;</span><span 
class="p">:</span> <span class="n">allow_large_results</span><span 
class="p">,</span>
+                <span class="s1">&#39;writeDisposition&#39;</span><span 
class="p">:</span> <span class="n">write_disposition</span><span 
class="p">,</span>
+                <span class="s1">&#39;createDisposition&#39;</span><span 
class="p">:</span> <span class="n">create_disposition</span><span 
class="p">,</span>
+                <span class="s1">&#39;destinationTable&#39;</span><span 
class="p">:</span> <span class="p">{</span>
+                    <span class="s1">&#39;projectId&#39;</span><span 
class="p">:</span> <span class="n">destination_project</span><span 
class="p">,</span>
+                    <span class="s1">&#39;datasetId&#39;</span><span 
class="p">:</span> <span class="n">destination_dataset</span><span 
class="p">,</span>
+                    <span class="s1">&#39;tableId&#39;</span><span 
class="p">:</span> <span class="n">destination_table</span><span 
class="p">,</span>
+                <span class="p">}</span>
+            <span class="p">})</span>
+        <span class="k">if</span> <span class="n">udf_config</span><span 
class="p">:</span>
+            <span class="k">assert</span> <span 
class="nb">isinstance</span><span class="p">(</span><span 
class="n">udf_config</span><span class="p">,</span> <span 
class="nb">list</span><span class="p">)</span>
+            <span class="n">configuration</span><span class="p">[</span><span 
class="s1">&#39;query&#39;</span><span class="p">]</span><span 
class="o">.</span><span class="n">update</span><span class="p">({</span>
+                <span 
class="s1">&#39;userDefinedFunctionResources&#39;</span><span 
class="p">:</span> <span class="n">udf_config</span>
+            <span class="p">})</span>
+
+        <span class="k">if</span> <span class="n">query_params</span><span 
class="p">:</span>
+            <span class="n">configuration</span><span class="p">[</span><span 
class="s1">&#39;query&#39;</span><span class="p">][</span><span 
class="s1">&#39;queryParameters&#39;</span><span class="p">]</span> <span 
class="o">=</span> <span class="n">query_params</span>
+
+        <span class="k">return</span> <span class="bp">self</span><span 
class="o">.</span><span class="n">run_with_configuration</span><span 
class="p">(</span><span class="n">configuration</span><span class="p">)</span>
+
+    <span class="k">def</span> <span class="nf">run_extract</span><span 
class="p">(</span>  <span class="c1"># noqa</span>
+            <span class="bp">self</span><span class="p">,</span> <span 
class="n">source_project_dataset_table</span><span class="p">,</span> <span 
class="n">destination_cloud_storage_uris</span><span class="p">,</span>
+            <span class="n">compression</span><span class="o">=</span><span 
class="s1">&#39;NONE&#39;</span><span class="p">,</span> <span 
class="n">export_format</span><span class="o">=</span><span 
class="s1">&#39;CSV&#39;</span><span class="p">,</span> <span 
class="n">field_delimiter</span><span class="o">=</span><span 
class="s1">&#39;,&#39;</span><span class="p">,</span>
+            <span class="n">print_header</span><span class="o">=</span><span 
class="kc">True</span><span class="p">):</span>
+        <span class="sd">&quot;&quot;&quot;</span>
+<span class="sd">        Executes a BigQuery extract command to copy data from 
BigQuery to</span>
+<span class="sd">        Google Cloud Storage. See here:</span>
+
+<span class="sd">        
https://cloud.google.com/bigquery/docs/reference/v2/jobs</span>
+
+<span class="sd">        For more details about these parameters.</span>
+
+<span class="sd">        :param source_project_dataset_table: The dotted 
&lt;dataset&gt;.&lt;table&gt;</span>
+<span class="sd">            BigQuery table to use as the source data.</span>
+<span class="sd">        :type source_project_dataset_table: string</span>
+<span class="sd">        :param destination_cloud_storage_uris: The 
destination Google Cloud</span>
+<span class="sd">            Storage URI (e.g. 
gs://some-bucket/some-file.txt). Follows</span>
+<span class="sd">            convention defined here:</span>
+<span class="sd">            
https://cloud.google.com/bigquery/exporting-data-from-bigquery#exportingmultiple</span>
+<span class="sd">        :type destination_cloud_storage_uris: list</span>
+<span class="sd">        :param compression: Type of compression to use.</span>
+<span class="sd">        :type compression: string</span>
+<span class="sd">        :param export_format: File format to export.</span>
+<span class="sd">        :type export_format: string</span>
+<span class="sd">        :param field_delimiter: The delimiter to use when 
extracting to a CSV.</span>
+<span class="sd">        :type field_delimiter: string</span>
+<span class="sd">        :param print_header: Whether to print a header for a 
CSV file extract.</span>
+<span class="sd">        :type print_header: boolean</span>
+<span class="sd">        &quot;&quot;&quot;</span>
+
+        <span class="n">source_project</span><span class="p">,</span> <span 
class="n">source_dataset</span><span class="p">,</span> <span 
class="n">source_table</span> <span class="o">=</span> \
+            <span class="n">_split_tablename</span><span 
class="p">(</span><span class="n">table_input</span><span 
class="o">=</span><span class="n">source_project_dataset_table</span><span 
class="p">,</span>
+                             <span class="n">default_project_id</span><span 
class="o">=</span><span class="bp">self</span><span class="o">.</span><span 
class="n">project_id</span><span class="p">,</span>
+                             <span class="n">var_name</span><span 
class="o">=</span><span 
class="s1">&#39;source_project_dataset_table&#39;</span><span class="p">)</span>
+
+        <span class="n">configuration</span> <span class="o">=</span> <span 
class="p">{</span>
+            <span class="s1">&#39;extract&#39;</span><span class="p">:</span> 
<span class="p">{</span>
+                <span class="s1">&#39;sourceTable&#39;</span><span 
class="p">:</span> <span class="p">{</span>
+                    <span class="s1">&#39;projectId&#39;</span><span 
class="p">:</span> <span class="n">source_project</span><span class="p">,</span>
+                    <span class="s1">&#39;datasetId&#39;</span><span 
class="p">:</span> <span class="n">source_dataset</span><span class="p">,</span>
+                    <span class="s1">&#39;tableId&#39;</span><span 
class="p">:</span> <span class="n">source_table</span><span class="p">,</span>
+                <span class="p">},</span>
+                <span class="s1">&#39;compression&#39;</span><span 
class="p">:</span> <span class="n">compression</span><span class="p">,</span>
+                <span class="s1">&#39;destinationUris&#39;</span><span 
class="p">:</span> <span class="n">destination_cloud_storage_uris</span><span 
class="p">,</span>
+                <span class="s1">&#39;destinationFormat&#39;</span><span 
class="p">:</span> <span class="n">export_format</span><span class="p">,</span>
+            <span class="p">}</span>
+        <span class="p">}</span>
+
+        <span class="k">if</span> <span class="n">export_format</span> <span 
class="o">==</span> <span class="s1">&#39;CSV&#39;</span><span 
class="p">:</span>
+            <span class="c1"># Only set fieldDelimiter and printHeader fields 
if using CSV.</span>
+            <span class="c1"># Google does not like it if you set these fields 
for other export</span>
+            <span class="c1"># formats.</span>
+            <span class="n">configuration</span><span class="p">[</span><span 
class="s1">&#39;extract&#39;</span><span class="p">][</span><span 
class="s1">&#39;fieldDelimiter&#39;</span><span class="p">]</span> <span 
class="o">=</span> <span class="n">field_delimiter</span>
+            <span class="n">configuration</span><span class="p">[</span><span 
class="s1">&#39;extract&#39;</span><span class="p">][</span><span 
class="s1">&#39;printHeader&#39;</span><span class="p">]</span> <span 
class="o">=</span> <span class="n">print_header</span>
+
+        <span class="k">return</span> <span class="bp">self</span><span 
class="o">.</span><span class="n">run_with_configuration</span><span 
class="p">(</span><span class="n">configuration</span><span class="p">)</span>
+
+    <span class="k">def</span> <span class="nf">run_copy</span><span 
class="p">(</span><span class="bp">self</span><span class="p">,</span>
+                 <span class="n">source_project_dataset_tables</span><span 
class="p">,</span>
+                 <span class="n">destination_project_dataset_table</span><span 
class="p">,</span>
+                 <span class="n">write_disposition</span><span 
class="o">=</span><span class="s1">&#39;WRITE_EMPTY&#39;</span><span 
class="p">,</span>
+                 <span class="n">create_disposition</span><span 
class="o">=</span><span class="s1">&#39;CREATE_IF_NEEDED&#39;</span><span 
class="p">):</span>
+        <span class="sd">&quot;&quot;&quot;</span>
+<span class="sd">        Executes a BigQuery copy command to copy data from 
one BigQuery table</span>
+<span class="sd">        to another. See here:</span>
+
+<span class="sd">        
https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.copy</span>
+
+<span class="sd">        For more details about these parameters.</span>
+
+<span class="sd">        :param source_project_dataset_tables: One or more 
dotted</span>
+<span class="sd">            
(project:|project.)&lt;dataset&gt;.&lt;table&gt;</span>
+<span class="sd">            BigQuery tables to use as the source data. Use a 
list if there are</span>
+<span class="sd">            multiple source tables.</span>
+<span class="sd">            If &lt;project&gt; is not included, project will 
be the project defined</span>
+<span class="sd">            in the connection json.</span>
+<span class="sd">        :type source_project_dataset_tables: 
list|string</span>
+<span class="sd">        :param destination_project_dataset_table: The 
destination BigQuery</span>
+<span class="sd">            table. Format is: 
(project:|project.)&lt;dataset&gt;.&lt;table&gt;</span>
+<span class="sd">        :type destination_project_dataset_table: string</span>
+<span class="sd">        :param write_disposition: The write disposition if 
the table already exists.</span>
+<span class="sd">        :type write_disposition: string</span>
+<span class="sd">        :param create_disposition: The create disposition if 
the table doesn&#39;t exist.</span>
+<span class="sd">        :type create_disposition: string</span>
+<span class="sd">        &quot;&quot;&quot;</span>
+        <span class="n">source_project_dataset_tables</span> <span 
class="o">=</span> <span class="p">(</span>
+            <span class="p">[</span><span 
class="n">source_project_dataset_tables</span><span class="p">]</span>
+            <span class="k">if</span> <span class="ow">not</span> <span 
class="nb">isinstance</span><span class="p">(</span><span 
class="n">source_project_dataset_tables</span><span class="p">,</span> <span 
class="nb">list</span><span class="p">)</span>
+            <span class="k">else</span> <span 
class="n">source_project_dataset_tables</span><span class="p">)</span>
+
+        <span class="n">source_project_dataset_tables_fixup</span> <span 
class="o">=</span> <span class="p">[]</span>
+        <span class="k">for</span> <span 
class="n">source_project_dataset_table</span> <span class="ow">in</span> <span 
class="n">source_project_dataset_tables</span><span class="p">:</span>
+            <span class="n">source_project</span><span class="p">,</span> 
<span class="n">source_dataset</span><span class="p">,</span> <span 
class="n">source_table</span> <span class="o">=</span> \
+                <span class="n">_split_tablename</span><span 
class="p">(</span><span class="n">table_input</span><span 
class="o">=</span><span class="n">source_project_dataset_table</span><span 
class="p">,</span>
+                                 <span 
class="n">default_project_id</span><span class="o">=</span><span 
class="bp">self</span><span class="o">.</span><span 
class="n">project_id</span><span class="p">,</span>
+                                 <span class="n">var_name</span><span 
class="o">=</span><span 
class="s1">&#39;source_project_dataset_table&#39;</span><span class="p">)</span>
+            <span class="n">source_project_dataset_tables_fixup</span><span 
class="o">.</span><span class="n">append</span><span class="p">({</span>
+                <span class="s1">&#39;projectId&#39;</span><span 
class="p">:</span> <span class="n">source_project</span><span class="p">,</span>
+                <span class="s1">&#39;datasetId&#39;</span><span 
class="p">:</span> <span class="n">source_dataset</span><span class="p">,</span>
+                <span class="s1">&#39;tableId&#39;</span><span 
class="p">:</span> <span class="n">source_table</span>
+            <span class="p">})</span>
+
+        <span class="n">destination_project</span><span class="p">,</span> 
<span class="n">destination_dataset</span><span class="p">,</span> <span 
class="n">destination_table</span> <span class="o">=</span> \
+            <span class="n">_split_tablename</span><span 
class="p">(</span><span class="n">table_input</span><span 
class="o">=</span><span class="n">destination_project_dataset_table</span><span 
class="p">,</span>
+                             <span class="n">default_project_id</span><span 
class="o">=</span><span class="bp">self</span><span class="o">.</span><span 
class="n">project_id</span><span class="p">)</span>
+        <span class="n">configuration</span> <span class="o">=</span> <span 
class="p">{</span>
+            <span class="s1">&#39;copy&#39;</span><span class="p">:</span> 
<span class="p">{</span>
+                <span class="s1">&#39;createDisposition&#39;</span><span 
class="p">:</span> <span class="n">create_disposition</span><span 
class="p">,</span>
+                <span class="s1">&#39;writeDisposition&#39;</span><span 
class="p">:</span> <span class="n">write_disposition</span><span 
class="p">,</span>
+                <span class="s1">&#39;sourceTables&#39;</span><span 
class="p">:</span> <span 
class="n">source_project_dataset_tables_fixup</span><span class="p">,</span>
+                <span class="s1">&#39;destinationTable&#39;</span><span 
class="p">:</span> <span class="p">{</span>
+                    <span class="s1">&#39;projectId&#39;</span><span 
class="p">:</span> <span class="n">destination_project</span><span 
class="p">,</span>
+                    <span class="s1">&#39;datasetId&#39;</span><span 
class="p">:</span> <span class="n">destination_dataset</span><span 
class="p">,</span>
+                    <span class="s1">&#39;tableId&#39;</span><span 
class="p">:</span> <span class="n">destination_table</span>
+                <span class="p">}</span>
+            <span class="p">}</span>
+        <span class="p">}</span>
+
+        <span class="k">return</span> <span class="bp">self</span><span 
class="o">.</span><span class="n">run_with_configuration</span><span 
class="p">(</span><span class="n">configuration</span><span class="p">)</span>
+
+    <span class="k">def</span> <span class="nf">run_load</span><span 
class="p">(</span><span class="bp">self</span><span class="p">,</span>
+                 <span class="n">destination_project_dataset_table</span><span 
class="p">,</span>
+                 <span class="n">schema_fields</span><span class="p">,</span> 
<span class="n">source_uris</span><span class="p">,</span>
+                 <span class="n">source_format</span><span 
class="o">=</span><span class="s1">&#39;CSV&#39;</span><span class="p">,</span>
+                 <span class="n">create_disposition</span><span 
class="o">=</span><span class="s1">&#39;CREATE_IF_NEEDED&#39;</span><span 
class="p">,</span>
+                 <span class="n">skip_leading_rows</span><span 
class="o">=</span><span class="mi">0</span><span class="p">,</span>
+                 <span class="n">write_disposition</span><span 
class="o">=</span><span class="s1">&#39;WRITE_EMPTY&#39;</span><span 
class="p">,</span>
+                 <span class="n">field_delimiter</span><span 
class="o">=</span><span class="s1">&#39;,&#39;</span><span class="p">,</span>
+                 <span class="n">max_bad_records</span><span 
class="o">=</span><span class="mi">0</span><span class="p">,</span>
+                 <span class="n">quote_character</span><span 
class="o">=</span><span class="kc">None</span><span class="p">,</span>
+                 <span class="n">allow_quoted_newlines</span><span 
class="o">=</span><span class="kc">False</span><span class="p">,</span>
+                 <span class="n">allow_jagged_rows</span><span 
class="o">=</span><span class="kc">False</span><span class="p">,</span>
+                 <span class="n">schema_update_options</span><span 
class="o">=</span><span class="p">(),</span>
+                 <span class="n">src_fmt_configs</span><span 
class="o">=</span><span class="p">{}):</span>
+        <span class="sd">&quot;&quot;&quot;</span>
+<span class="sd">        Executes a BigQuery load command to load data from 
Google Cloud Storage</span>
+<span class="sd">        to BigQuery. See here:</span>
+
+<span class="sd">        
https://cloud.google.com/bigquery/docs/reference/v2/jobs</span>
+
+<span class="sd">        For more details about these parameters.</span>
+
+<span class="sd">        :param destination_project_dataset_table:</span>
+<span class="sd">            The dotted 
(&lt;project&gt;.|&lt;project&gt;:)&lt;dataset&gt;.&lt;table&gt; BigQuery table 
to load</span>
+<span class="sd">            data into. If &lt;project&gt; is not included, 
project will be the project defined</span>
+<span class="sd">            in the connection json.</span>
+<span class="sd">        :type destination_project_dataset_table: string</span>
+<span class="sd">        :param schema_fields: The schema field list as 
defined here:</span>
+<span class="sd">            
https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load</span>
+<span class="sd">        :type schema_fields: list</span>
+<span class="sd">        :param source_uris: The source Google Cloud</span>
+<span class="sd">            Storage URI (e.g. 
gs://some-bucket/some-file.txt). A single wild</span>
+<span class="sd">            per-object name can be used.</span>
+<span class="sd">        :type source_uris: list</span>
+<span class="sd">        :param source_format: File format to export.</span>
+<span class="sd">        :type source_format: string</span>
+<span class="sd">        :param create_disposition: The create disposition if 
the table doesn&#39;t exist.</span>
+<span class="sd">        :type create_disposition: string</span>
+<span class="sd">        :param skip_leading_rows: Number of rows to skip when 
loading from a CSV.</span>
+<span class="sd">        :type skip_leading_rows: int</span>
+<span class="sd">        :param write_disposition: The write disposition if 
the table already exists.</span>
+<span class="sd">        :type write_disposition: string</span>
+<span class="sd">        :param field_delimiter: The delimiter to use when 
loading from a CSV.</span>
+<span class="sd">        :type field_delimiter: string</span>
+<span class="sd">        :param max_bad_records: The maximum number of bad 
records that BigQuery can</span>
+<span class="sd">            ignore when running the job.</span>
+<span class="sd">        :type max_bad_records: int</span>
+<span class="sd">        :param quote_character: The value that is used to 
quote data sections in a CSV file.</span>
+<span class="sd">        :type quote_character: string</span>
+<span class="sd">        :param allow_quoted_newlines: Whether to allow quoted 
newlines (true) or not (false).</span>
+<span class="sd">        :type allow_quoted_newlines: boolean</span>
+<span class="sd">        :param allow_jagged_rows: Accept rows that are 
missing trailing optional columns.</span>
+<span class="sd">            The missing values are treated as nulls. If 
false, records with missing trailing columns</span>
+<span class="sd">            are treated as bad records, and if there are too 
many bad records, an invalid error is</span>
+<span class="sd">            returned in the job result. Only applicable when 
source_format is CSV.</span>
+<span class="sd">        :type allow_jagged_rows: bool</span>
+<span class="sd">        :param schema_update_options: Allows the schema of 
the destination</span>
+<span class="sd">            table to be updated as a side effect of the load 
job.</span>
+<span class="sd">        :type schema_update_options: list</span>
+<span class="sd">        :param src_fmt_configs: configure optional fields 
specific to the source format</span>
+<span class="sd">        :type src_fmt_configs: dict</span>
+<span class="sd">        &quot;&quot;&quot;</span>
+
+        <span class="c1"># bigquery only allows certain source formats</span>
+        <span class="c1"># we check to make sure the passed source format is 
valid</span>
+        <span class="c1"># if it&#39;s not, we raise a ValueError</span>
+        <span class="c1"># Refer to this link for more details:</span>
+        <span class="c1">#   
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).sourceFormat</span>
+        <span class="n">source_format</span> <span class="o">=</span> <span 
class="n">source_format</span><span class="o">.</span><span 
class="n">upper</span><span class="p">()</span>
+        <span class="n">allowed_formats</span> <span class="o">=</span> <span 
class="p">[</span><span class="s2">&quot;CSV&quot;</span><span 
class="p">,</span> <span 
class="s2">&quot;NEWLINE_DELIMITED_JSON&quot;</span><span class="p">,</span> 
<span class="s2">&quot;AVRO&quot;</span><span class="p">,</span> <span 
class="s2">&quot;GOOGLE_SHEETS&quot;</span><span class="p">,</span> <span 
class="s2">&quot;DATASTORE_BACKUP&quot;</span><span class="p">]</span>
+        <span class="k">if</span> <span class="n">source_format</span> <span 
class="ow">not</span> <span class="ow">in</span> <span 
class="n">allowed_formats</span><span class="p">:</span>
+            <span class="k">raise</span> <span 
class="ne">ValueError</span><span class="p">(</span><span 
class="s2">&quot;</span><span class="si">{0}</span><span class="s2"> is not a 
valid source format. &quot;</span>
+                    <span class="s2">&quot;Please use one of the following 
types: </span><span class="si">{1}</span><span class="s2">&quot;</span>
+                    <span class="o">.</span><span class="n">format</span><span 
class="p">(</span><span class="n">source_format</span><span class="p">,</span> 
<span class="n">allowed_formats</span><span class="p">))</span>
+
+        <span class="c1"># bigquery also allows you to define how you want a 
table&#39;s schema to change</span>
+        <span class="c1"># as a side effect of a load</span>
+        <span class="c1"># for more details:</span>
+        <span class="c1">#   
https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.schemaUpdateOptions</span>
+        <span class="n">allowed_schema_update_options</span> <span 
class="o">=</span> <span class="p">[</span>
+            <span class="s1">&#39;ALLOW_FIELD_ADDITION&#39;</span><span 
class="p">,</span>
+            <span class="s2">&quot;ALLOW_FIELD_RELAXATION&quot;</span>
+        <span class="p">]</span>
+        <span class="k">if</span> <span class="ow">not</span> <span 
class="nb">set</span><span class="p">(</span><span 
class="n">allowed_schema_update_options</span><span class="p">)</span><span 
class="o">.</span><span class="n">issuperset</span><span 
class="p">(</span><span class="nb">set</span><span class="p">(</span><span 
class="n">schema_update_options</span><span class="p">)):</span>
+            <span class="k">raise</span> <span 
class="ne">ValueError</span><span class="p">(</span>
+                <span class="s2">&quot;</span><span class="si">{0}</span><span 
class="s2"> contains invalid schema update options. &quot;</span>
+                <span class="s2">&quot;Please only use one or more of the 
following options: </span><span class="si">{1}</span><span 
class="s2">&quot;</span>
+                <span class="o">.</span><span class="n">format</span><span 
class="p">(</span><span class="n">schema_update_options</span><span 
class="p">,</span> <span class="n">allowed_schema_update_options</span><span 
class="p">)</span>
+            <span class="p">)</span>
+
+        <span class="n">destination_project</span><span class="p">,</span> 
<span class="n">destination_dataset</span><span class="p">,</span> <span 
class="n">destination_table</span> <span class="o">=</span> \
+            <span class="n">_split_tablename</span><span 
class="p">(</span><span class="n">table_input</span><span 
class="o">=</span><span class="n">destination_project_dataset_table</span><span 
class="p">,</span>
+                             <span class="n">default_project_id</span><span 
class="o">=</span><span class="bp">self</span><span class="o">.</span><span 
class="n">project_id</span><span class="p">,</span>
+                             <span class="n">var_name</span><span 
class="o">=</span><span 
class="s1">&#39;destination_project_dataset_table&#39;</span><span 
class="p">)</span>
+
+        <span class="n">configuration</span> <span class="o">=</span> <span 
class="p">{</span>
+            <span class="s1">&#39;load&#39;</span><span class="p">:</span> 
<span class="p">{</span>
+                <span class="s1">&#39;createDisposition&#39;</span><span 
class="p">:</span> <span class="n">create_disposition</span><span 
class="p">,</span>
+                <span class="s1">&#39;destinationTable&#39;</span><span 
class="p">:</span> <span class="p">{</span>
+                    <span class="s1">&#39;projectId&#39;</span><span 
class="p">:</span> <span class="n">destination_project</span><span 
class="p">,</span>
+                    <span class="s1">&#39;datasetId&#39;</span><span 
class="p">:</span> <span class="n">destination_dataset</span><span 
class="p">,</span>
+                    <span class="s1">&#39;tableId&#39;</span><span 
class="p">:</span> <span class="n">destination_table</span><span 
class="p">,</span>
+                <span class="p">},</span>
+                <span class="s1">&#39;sourceFormat&#39;</span><span 
class="p">:</span> <span class="n">source_format</span><span class="p">,</span>
+                <span class="s1">&#39;sourceUris&#39;</span><span 
class="p">:</span> <span class="n">source_uris</span><span class="p">,</span>
+                <span class="s1">&#39;writeDisposition&#39;</span><span 
class="p">:</span> <span class="n">write_disposition</span><span 
class="p">,</span>
+            <span class="p">}</span>
+        <span class="p">}</span>
+        <span class="k">if</span> <span class="n">schema_fields</span><span 
class="p">:</span>
+            <span class="n">configuration</span><span class="p">[</span><span 
class="s1">&#39;load&#39;</span><span class="p">][</span><span 
class="s1">&#39;schema&#39;</span><span class="p">]</span> <span 
class="o">=</span> <span class="p">{</span>
+                <span class="s1">&#39;fields&#39;</span><span 
class="p">:</span> <span class="n">schema_fields</span>
+            <span class="p">}</span>
+
+        <span class="k">if</span> <span 
class="n">schema_update_options</span><span class="p">:</span>
+            <span class="k">if</span> <span class="n">write_disposition</span> 
<span class="ow">not</span> <span class="ow">in</span> <span 
class="p">[</span><span class="s2">&quot;WRITE_APPEND&quot;</span><span 
class="p">,</span> <span class="s2">&quot;WRITE_TRUNCATE&quot;</span><span 
class="p">]:</span>
+                <span class="k">raise</span> <span 
class="ne">ValueError</span><span class="p">(</span>
+                    <span class="s2">&quot;schema_update_options is only 
&quot;</span>
+                    <span class="s2">&quot;allowed if write_disposition is 
&quot;</span>
+                    <span class="s2">&quot;&#39;WRITE_APPEND&#39; or 
&#39;WRITE_TRUNCATE&#39;.&quot;</span>
+                <span class="p">)</span>
+            <span class="k">else</span><span class="p">:</span>
+                <span class="bp">self</span><span class="o">.</span><span 
class="n">log</span><span class="o">.</span><span class="n">info</span><span 
class="p">(</span>
+                    <span class="s2">&quot;Adding experimental &quot;</span>
+                    <span class="s2">&quot;&#39;schemaUpdateOptions&#39;: 
</span><span class="si">{0}</span><span class="s2">&quot;</span><span 
class="o">.</span><span class="n">format</span><span class="p">(</span><span 
class="n">schema_update_options</span><span class="p">)</span>
+                <span class="p">)</span>
+                <span class="n">configuration</span><span 
class="p">[</span><span class="s1">&#39;load&#39;</span><span 
class="p">][</span><span class="s1">&#39;schemaUpdateOptions&#39;</span><span 
class="p">]</span> <span class="o">=</span> <span 
class="n">schema_update_options</span>
+
+        <span class="k">if</span> <span class="n">max_bad_records</span><span 
class="p">:</span>
+            <span class="n">configuration</span><span class="p">[</span><span 
class="s1">&#39;load&#39;</span><span class="p">][</span><span 
class="s1">&#39;maxBadRecords&#39;</span><span class="p">]</span> <span 
class="o">=</span> <span class="n">max_bad_records</span>
+
+        <span class="c1"># if following fields are not specified in 
src_fmt_configs,</span>
+        <span class="c1"># honor the top-level params for 
backward-compatibility</span>
+        <span class="k">if</span> <span 
class="s1">&#39;skipLeadingRows&#39;</span> <span class="ow">not</span> <span 
class="ow">in</span> <span class="n">src_fmt_configs</span><span 
class="p">:</span>
+            <span class="n">src_fmt_configs</span><span 
class="p">[</span><span class="s1">&#39;skipLeadingRows&#39;</span><span 
class="p">]</span> <span class="o">=</span> <span 
class="n">skip_leading_rows</span>
+        <span class="k">if</span> <span 
class="s1">&#39;fieldDelimiter&#39;</span> <span class="ow">not</span> <span 
class="ow">in</span> <span class="n">src_fmt_configs</span><span 
class="p">:</span>
+            <span class="n">src_fmt_configs</span><span 
class="p">[</span><span class="s1">&#39;fieldDelimiter&#39;</span><span 
class="p">]</span> <span class="o">=</span> <span 
class="n">field_delimiter</span>
+        <span class="k">if</span> <span class="n">quote_character</span><span 
class="p">:</span>
+            <span class="n">src_fmt_configs</span><span 
class="p">[</span><span class="s1">&#39;quote&#39;</span><span 
class="p">]</span> <span class="o">=</span> <span 
class="n">quote_character</span>
+        <span class="k">if</span> <span 
class="n">allow_quoted_newlines</span><span class="p">:</span>
+            <span class="n">src_fmt_configs</span><span 
class="p">[</span><span class="s1">&#39;allowQuotedNewlines&#39;</span><span 
class="p">]</span> <span class="o">=</span> <span 
class="n">allow_quoted_newlines</span>
+
+        <span class="n">src_fmt_to_configs_mapping</span> <span 
class="o">=</span> <span class="p">{</span>
+            <span class="s1">&#39;CSV&#39;</span><span class="p">:</span> 
<span class="p">[</span><span class="s1">&#39;allowJaggedRows&#39;</span><span 
class="p">,</span> <span class="s1">&#39;allowQuotedNewlines&#39;</span><span 
class="p">,</span> <span class="s1">&#39;autodetect&#39;</span><span 
class="p">,</span>
+                    <span class="s1">&#39;fieldDelimiter&#39;</span><span 
class="p">,</span> <span class="s1">&#39;skipLeadingRows&#39;</span><span 
class="p">,</span> <span class="s1">&#39;ignoreUnknownValues&#39;</span><span 
class="p">,</span>
+                    <span class="s1">&#39;nullMarker&#39;</span><span 
class="p">,</span> <span class="s1">&#39;quote&#39;</span><span 
class="p">],</span>
+            <span class="s1">&#39;DATASTORE_BACKUP&#39;</span><span 
class="p">:</span> <span class="p">[</span><span 
class="s1">&#39;projectionFields&#39;</span><span class="p">],</span>
+            <span class="s1">&#39;NEWLINE_DELIMITED_JSON&#39;</span><span 
class="p">:</span> <span class="p">[</span><span 
class="s1">&#39;autodetect&#39;</span><span class="p">,</span> <span 
class="s1">&#39;ignoreUnknownValues&#39;</span><span class="p">],</span>
+            <span class="s1">&#39;AVRO&#39;</span><span class="p">:</span> 
<span class="p">[],</span>
+        <span class="p">}</span>
+        <span class="n">valid_configs</span> <span class="o">=</span> <span 
class="n">src_fmt_to_configs_mapping</span><span class="p">[</span><span 
class="n">source_format</span><span class="p">]</span>
+        <span class="n">src_fmt_configs</span> <span class="o">=</span> <span 
class="p">{</span><span class="n">k</span><span class="p">:</span> <span 
class="n">v</span> <span class="k">for</span> <span class="n">k</span><span 
class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span 
class="n">src_fmt_configs</span><span class="o">.</span><span 
class="n">items</span><span class="p">()</span>
+                           <span class="k">if</span> <span class="n">k</span> 
<span class="ow">in</span> <span class="n">valid_configs</span><span 
class="p">}</span>
+        <span class="n">configuration</span><span class="p">[</span><span 
class="s1">&#39;load&#39;</span><span class="p">]</span><span 
class="o">.</span><span class="n">update</span><span class="p">(</span><span 
class="n">src_fmt_configs</span><span class="p">)</span>
+
+        <span class="k">if</span> <span 
class="n">allow_jagged_rows</span><span class="p">:</span>
+            <span class="n">configuration</span><span class="p">[</span><span 
class="s1">&#39;load&#39;</span><span class="p">][</span><span 
class="s1">&#39;allowJaggedRows&#39;</span><span class="p">]</span> <span 
class="o">=</span> <span class="n">allow_jagged_rows</span>
+
+        <span class="k">return</span> <span class="bp">self</span><span 
class="o">.</span><span class="n">run_with_configuration</span><span 
class="p">(</span><span class="n">configuration</span><span class="p">)</span>
+
+    <span class="k">def</span> <span 
class="nf">run_with_configuration</span><span class="p">(</span><span 
class="bp">self</span><span class="p">,</span> <span 
class="n">configuration</span><span class="p">):</span>
+        <span class="sd">&quot;&quot;&quot;</span>
+<span class="sd">        Executes a BigQuery SQL query. See here:</span>
+
+<span class="sd">        
https://cloud.google.com/bigquery/docs/reference/v2/jobs</span>
+
+<span class="sd">        For more details about the configuration 
parameter.</span>
+
+<span class="sd">        :param configuration: The configuration parameter 
maps directly to</span>
+<span class="sd">            BigQuery&#39;s configuration field in the job 
object. See</span>
+<span class="sd">            
https://cloud.google.com/bigquery/docs/reference/v2/jobs for</span>
+<span class="sd">            details.</span>
+<span class="sd">        &quot;&quot;&quot;</span>
+        <span class="n">jobs</span> <span class="o">=</span> <span 
class="bp">self</span><span class="o">.</span><span 
class="n">service</span><span class="o">.</span><span 
class="n">jobs</span><span class="p">()</span>
+        <span class="n">job_data</span> <span class="o">=</span> <span 
class="p">{</span>
+            <span class="s1">&#39;configuration&#39;</span><span 
class="p">:</span> <span class="n">configuration</span>
+        <span class="p">}</span>
+
+        <span class="c1"># Send query and wait for reply.</span>
+        <span class="n">query_reply</span> <span class="o">=</span> <span 
class="n">jobs</span> \
+            <span class="o">.</span><span class="n">insert</span><span 
class="p">(</span><span class="n">projectId</span><span class="o">=</span><span 
class="bp">self</span><span class="o">.</span><span 
class="n">project_id</span><span class="p">,</span> <span 
class="n">body</span><span class="o">=</span><span 
class="n">job_data</span><span class="p">)</span> \
+            <span class="o">.</span><span class="n">execute</span><span 
class="p">()</span>
+        <span class="n">job_id</span> <span class="o">=</span> <span 
class="n">query_reply</span><span class="p">[</span><span 
class="s1">&#39;jobReference&#39;</span><span class="p">][</span><span 
class="s1">&#39;jobId&#39;</span><span class="p">]</span>
+
+        <span class="c1"># Wait for query to finish.</span>
+        <span class="n">keep_polling_job</span> <span class="o">=</span> <span 
class="kc">True</span>
+        <span class="k">while</span> <span class="p">(</span><span 
class="n">keep_polling_job</span><span class="p">):</span>
+            <span class="k">try</span><span class="p">:</span>
+                <span class="n">job</span> <span class="o">=</span> <span 
class="n">jobs</span><span class="o">.</span><span class="n">get</span><span 
class="p">(</span><span class="n">projectId</span><span class="o">=</span><span 
class="bp">self</span><span class="o">.</span><span 
class="n">project_id</span><span class="p">,</span> <span 
class="n">jobId</span><span class="o">=</span><span 
class="n">job_id</span><span class="p">)</span><span class="o">.</span><span 
class="n">execute</span><span class="p">()</span>
+                <span class="k">if</span> <span class="p">(</span><span 
class="n">job</span><span class="p">[</span><span 
class="s1">&#39;status&#39;</span><span class="p">][</span><span 
class="s1">&#39;state&#39;</span><span class="p">]</span> <span 
class="o">==</span> <span class="s1">&#39;DONE&#39;</span><span 
class="p">):</span>
+                    <span class="n">keep_polling_job</span> <span 
class="o">=</span> <span class="kc">False</span>
+                    <span class="c1"># Check if job had errors.</span>
+                    <span class="k">if</span> <span 
class="s1">&#39;errorResult&#39;</span> <span class="ow">in</span> <span 
class="n">job</span><span class="p">[</span><span 
class="s1">&#39;status&#39;</span><span class="p">]:</span>
+                        <span class="k">raise</span> <span 
class="ne">Exception</span><span class="p">(</span>
+                            <span class="s1">&#39;BigQuery job failed. Final 
error was: </span><span class="si">{}</span><span class="s1">. The job was: 
</span><span class="si">{}</span><span class="s1">&#39;</span><span 
class="o">.</span><span class="n">format</span><span class="p">(</span>
+                                <span class="n">job</span><span 
class="p">[</span><span class="s1">&#39;status&#39;</span><span 
class="p">][</span><span class="s1">&#39;errorResult&#39;</span><span 
class="p">],</span> <span class="n">job</span>
+                            <span class="p">)</span>
+                        <span class="p">)</span>
+                <span class="k">else</span><span class="p">:</span>
+                    <span class="bp">self</span><span class="o">.</span><span 
class="n">log</span><span class="o">.</span><span class="n">info</span><span 
class="p">(</span><span class="s1">&#39;Waiting for job to complete : 
</span><span class="si">%s</span><span class="s1">, </span><span 
class="si">%s</span><span class="s1">&#39;</span><span class="p">,</span> <span 
class="bp">self</span><span class="o">.</span><span 
class="n">project_id</span><span class="p">,</span> <span 
class="n">job_id</span><span class="p">)</span>
+                    <span class="n">time</span><span class="o">.</span><span 
class="n">sleep</span><span class="p">(</span><span class="mi">5</span><span 
class="p">)</span>
+
+            <span class="k">except</span> <span class="n">HttpError</span> 
<span class="k">as</span> <span class="n">err</span><span class="p">:</span>
+                <span class="k">if</span> <span class="n">err</span><span 
class="o">.</span><span class="n">resp</span><span class="o">.</span><span 
class="n">status</span> <span class="ow">in</span> <span 
class="p">[</span><span class="mi">500</span><span class="p">,</span> <span 
class="mi">503</span><span class="p">]:</span>
+                    <span class="bp">self</span><span class="o">.</span><span 
class="n">log</span><span class="o">.</span><span class="n">info</span><span 
class="p">(</span><span class="s1">&#39;</span><span class="si">%s</span><span 
class="s1">: Retryable error, waiting for job to complete: </span><span 
class="si">%s</span><span class="s1">&#39;</span><span class="p">,</span> <span 
class="n">err</span><span class="o">.</span><span class="n">resp</span><span 
class="o">.</span><span class="n">status</span><span class="p">,</span> <span 
class="n">job_id</span><span class="p">)</span>
+                    <span class="n">time</span><span class="o">.</span><span 
class="n">sleep</span><span class="p">(</span><span class="mi">5</span><span 
class="p">)</span>
+                <span class="k">else</span><span class="p">:</span>
+                    <span class="k">raise</span> <span 
class="ne">Exception</span><span class="p">(</span>
+                        <span class="s1">&#39;BigQuery job status check 
failed. Final error was: </span><span class="si">%s</span><span 
class="s1">&#39;</span><span class="p">,</span> <span class="n">err</span><span 
class="o">.</span><span class="n">resp</span><span class="o">.</span><span 
class="n">status</span><span class="p">)</span>
+
+        <span class="k">return</span> <span class="n">job_id</span>
+
+    <span class="k">def</span> <span class="nf">get_schema</span><span 
class="p">(</span><span class="bp">self</span><span class="p">,</span> <span 
class="n">dataset_id</span><span class="p">,</span> <span 
class="n">table_id</span><span class="p">):</span>
+        <span class="sd">&quot;&quot;&quot;</span>
+<span class="sd">        Get the schema for a given datset.table.</span>
+<span class="sd">        see 
https://cloud.google.com/bigquery/docs/reference/v2/tables#resource</span>
+
+<span class="sd">        :param dataset_id: the dataset ID of the requested 
table</span>
+<span class="sd">        :param table_id: the table ID of the requested 
table</span>
+<span class="sd">        :return: a table schema</span>
+<span class="sd">        &quot;&quot;&quot;</span>
+        <span class="n">tables_resource</span> <span class="o">=</span> <span 
class="bp">self</span><span class="o">.</span><span 
class="n">service</span><span class="o">.</span><span 
class="n">tables</span><span class="p">()</span> \
+            <span class="o">.</span><span class="n">get</span><span 
class="p">(</span><span class="n">projectId</span><span class="o">=</span><span 
class="bp">self</span><span class="o">.</span><span 
class="n">project_id</span><span class="p">,</span> <span 
class="n">datasetId</span><span class="o">=</span><span 
class="n">dataset_id</span><span class="p">,</span> <span 
class="n">tableId</span><span class="o">=</span><span 
class="n">table_id</span><span class="p">)</span> \
+            <span class="o">.</span><span class="n">execute</span><span 
class="p">()</span>
+        <span class="k">return</span> <span 
class="n">tables_resource</span><span class="p">[</span><span 
class="s1">&#39;schema&#39;</span><span class="p">]</span>
+
+    <span class="k">def</span> <span class="nf">get_tabledata</span><span 
class="p">(</span><span class="bp">self</span><span class="p">,</span> <span 
class="n">dataset_id</span><span class="p">,</span> <span 
class="n">table_id</span><span class="p">,</span>
+                      <span class="n">max_results</span><span 
class="o">=</span><span class="kc">None</span><span class="p">,</span> <span 
class="n">page_token</span><span class="o">=</span><span 
class="kc">None</span><span class="p">,</span> <span 
class="n">start_index</span><span class="o">=</span><span 
class="kc">None</span><span class="p">):</span>
+        <span class="sd">&quot;&quot;&quot;</span>
+<span class="sd">        Get the data of a given dataset.table.</span>
+<span class="sd">        see 
https://cloud.google.com/bigquery/docs/reference/v2/tabledata/list</span>
+
+<span class="sd">        :param dataset_id: the dataset ID of the requested 
table.</span>
+<span class="sd">        :param table_id: the table ID of the requested 
table.</span>
+<span class="sd">        :param max_results: the maximum results to 
return.</span>
+<span class="sd">        :param page_token: page token, returned from a 
previous call,</span>
+<span class="sd">            identifying the result set.</span>
+<span class="sd">        :param start_index: zero based index of the starting 
row to read.</span>
+<span class="sd">        :return: map containing the requested rows.</span>
+<span class="sd">        &quot;&quot;&quot;</span>
+        <span class="n">optional_params</span> <span class="o">=</span> <span 
class="p">{}</span>
+        <span class="k">if</span> <span class="n">max_results</span><span 
class="p">:</span>
+            <span class="n">optional_params</span><span 
class="p">[</span><span class="s1">&#39;maxResults&#39;</span><span 
class="p">]</span> <span class="o">=</span> <span class="n">max_results</span>
+        <span class="k">if</span> <span class="n">page_token</span><span 
class="p">:</span>
+            <span class="n">optional_params</span><span 
class="p">[</span><span class="s1">&#39;pageToken&#39;</span><span 
class="p">]</span> <span class="o">=</span> <span class="n">page_token</span>
+        <span class="k">if</span> <span class="n">start_index</span><span 
class="p">:</span>
+            <span class="n">optional_params</span><span 
class="p">[</span><span class="s1">&#39;startIndex&#39;</span><span 
class="p">]</span> <span class="o">=</span> <span class="n">start_index</span>
+        <span class="k">return</span> <span class="p">(</span>
+            <span class="bp">self</span><span class="o">.</span><span 
class="n">service</span><span class="o">.</span><span 
class="n">tabledata</span><span class="p">()</span>
+            <span class="o">.</span><span class="n">list</span><span 
class="p">(</span>
+                <span class="n">projectId</span><span class="o">=</span><span 
class="bp">self</span><span class="o">.</span><span 
class="n">project_id</span><span class="p">,</span> <span 
class="n">datasetId</span><span class="o">=</span><span 
class="n">dataset_id</span><span class="p">,</span>
+                <span class="n">tableId</span><span class="o">=</span><span 
class="n">table_id</span><span class="p">,</span> <span 
class="o">**</span><span class="n">optional_params</span><span 
class="p">)</span>
+            <span class="o">.</span><span class="n">execute</span><span 
class="p">()</span>
+        <span class="p">)</span>
+
+    <span class="k">def</span> <span class="nf">run_table_delete</span><span 
class="p">(</span><span class="bp">self</span><span class="p">,</span> <span 
class="n">deletion_dataset_table</span><span class="p">,</span> <span 
class="n">ignore_if_missing</span><span class="o">=</span><span 
class="kc">False</span><span class="p">):</span>
+        <span class="sd">&quot;&quot;&quot;</span>
+<span class="sd">        Delete an existing table from the dataset;</span>
+<span class="sd">        If the table does not exist, return an error unless 
ignore_if_missing</span>
+<span class="sd">        is set to True.</span>
+
+<span class="sd">        :param deletion_dataset_table: A dotted</span>
+<span class="sd">        
(&lt;project&gt;.|&lt;project&gt;:)&lt;dataset&gt;.&lt;table&gt; that indicates 
which table</span>
+<span class="sd">        will be deleted.</span>
+<span class="sd">        :type deletion_dataset_table: str</span>
+<span class="sd">        :param ignore_if_missing: if True, then return 
success even if the</span>
+<span class="sd">        requested table does not exist.</span>
+<span class="sd">        :type ignore_if_missing: boolean</span>
+<span class="sd">        :return:</span>
+<span class="sd">        &quot;&quot;&quot;</span>
+
+        <span class="k">assert</span> <span class="s1">&#39;.&#39;</span> 
<span class="ow">in</span> <span class="n">deletion_dataset_table</span><span 
class="p">,</span> <span class="p">(</span>
+            <span class="s1">&#39;Expected deletion_dataset_table in the 
format of &#39;</span>
+            <span class="s1">&#39;&lt;dataset&gt;.&lt;table&gt;. Got: 
</span><span class="si">{}</span><span class="s1">&#39;</span><span 
class="p">)</span><span class="o">.</span><span class="n">format</span><span 
class="p">(</span><span class="n">deletion_dataset_table</span><span 
class="p">)</span>
+        <span class="n">deletion_project</span><span class="p">,</span> <span 
class="n">deletion_dataset</span><span class="p">,</span> <span 
class="n">deletion_table</span> <span class="o">=</span> \
+            <span class="n">_split_tablename</span><span 
class="p">(</span><span class="n">table_input</span><span 
class="o">=</span><span class="n">deletion_dataset_table</span><span 
class="p">,</span>
+                             <span class="n">default_project_id</span><span 
class="o">=</span><span class="bp">self</span><span class="o">.</span><span 
class="n">project_id</span><span class="p">)</span>
+
+        <span class="k">try</span><span class="p">:</span>
+            <span class="n">tables_resource</span> <span class="o">=</span> 
<span class="bp">self</span><span class="o">.</span><span 
class="n">service</span><span class="o">.</span><span 
class="n">tables</span><span class="p">()</span> \
+                <span class="o">.</span><span class="n">delete</span><span 
class="p">(</span><span class="n">projectId</span><span class="o">=</span><span 
class="n">deletion_project</span><span class="p">,</span>
+                        <span class="n">datasetId</span><span 
class="o">=</span><span class="n">deletion_dataset</span><span 
class="p">,</span>
+                        <span class="n">tableId</span><span 
class="o">=</span><span class="n">deletion_table</span><span class="p">)</span> 
\
+                <span class="o">.</span><span class="n">execute</span><span 
class="p">()</span>
+            <span class="bp">self</span><span class="o">.</span><span 
class="n">log</span><span class="o">.</span><span class="n">info</span><span 
class="p">(</span><span class="s1">&#39;Deleted table </span><span 
class="si">%s</span><span class="s1">:</span><span class="si">%s</span><span 
class="s1">.</span><span class="si">%s</span><span 
class="s1">.&#39;</span><span class="p">,</span>
+                          <span class="n">deletion_project</span><span 
class="p">,</span> <span class="n">deletion_dataset</span><span 
class="p">,</span> <span class="n">deletion_table</span><span class="p">)</span>
+        <span class="k">except</span> <span class="n">HttpError</span><span 
class="p">:</span>
+            <span class="k">if</span> <span class="ow">not</span> <span 
class="n">ignore_if_missing</span><span class="p">:</span>
+                <span class="k">raise</span> <span 
class="ne">Exception</span><span class="p">(</span>
+                    <span class="s1">&#39;Table deletion failed. Table does 
not exist.&#39;</span><span class="p">)</span>
+            <span class="k">else</span><span class="p">:</span>
+                <span class="bp">self</span><span class="o">.</span><span 
class="n">log</span><span class="o">.</span><span class="n">info</span><span 
class="p">(</span><span class="s1">&#39;Table does not exist. 
Skipping.&#39;</span><span class="p">)</span>
+
+
+    <span class="k">def</span> <span class="nf">run_table_upsert</span><span 
class="p">(</span><span class="bp">self</span><span class="p">,</span> <span 
class="n">dataset_id</span><span class="p">,</span> <span 
class="n">table_resource</span><span class="p">,</span> <span 
class="n">project_id</span><span class="o">=</span><span 
class="kc">None</span><span class="p">):</span>
+        <span class="sd">&quot;&quot;&quot;</span>
+<span class="sd">        creates a new, empty table in the dataset;</span>
+<span class="sd">        If the table already exists, update the existing 
table.</span>
+<span class="sd">        Since BigQuery does not natively allow table upserts, 
this is not an</span>
+<span class="sd">        atomic operation.</span>
+
+<span class="sd">        :param dataset_id: the dataset to upsert the table 
into.</span>
+<span class="sd">        :type dataset_id: str</span>
+<span class="sd">        :param table_resource: a table resource. see</span>
+<span class="sd">            
https://cloud.google.com/bigquery/docs/reference/v2/tables#resource</span>
+<span class="sd">        :type table_resource: dict</span>
+<span class="sd">        :param project_id: the project to upsert the table 
into.  If None,</span>
+<span class="sd">        project will be self.project_id.</span>
+<span class="sd">        :return:</span>
+<span class="sd">        &quot;&quot;&quot;</span>
+        <span class="c1"># check to see if the table exists</span>
+        <span class="n">table_id</span> <span class="o">=</span> <span 
class="n">table_resource</span><span class="p">[</span><span 
class="s1">&#39;tableReference&#39;</span><span class="p">][</span><span 
class="s1">&#39;tableId&#39;</span><span class="p">]</span>
+        <span class="n">project_id</span> <span class="o">=</span> <span 
class="n">project_id</span> <span class="k">if</span> <span 
class="n">project_id</span> <span class="ow">is</span> <span 
class="ow">not</span> <span class="kc">None</span> <span class="k">else</span> 
<span class="bp">self</span><span class="o">.</span><span 
class="n">project_id</span>
+        <span class="n">tables_list_resp</span> <span class="o">=</span> <span 
class="bp">self</span><span class="o">.</span><span 
class="n">service</span><span class="o">.</span><span 
class="n">tables</span><span class="p">()</span><span class="o">.</span><span 
class="n">list</span><span class="p">(</span><span 
class="n">projectId</span><span class="o">=</span><span 
class="n">project_id</span><span class="p">,</span>
+                                                      <span 
class="n">datasetId</span><span class="o">=</span><span 
class="n">dataset_id</span><span class="p">)</span><span 
class="o">.</span><span class="n">execute</span><span class="p">()</span>
+        <span class="k">while</span> <span class="kc">True</span><span 
class="p">:</span>
+            <span class="k">for</span> <span class="n">table</span> <span 
class="ow">in</span> <span class="n">tables_list_resp</span><span 
class="o">.</span><span class="n">get</span><span class="p">(</span><span 
class="s1">&#39;tables&#39;</span><span class="p">,</span> <span 
class="p">[]):</span>
+                <span class="k">if</span> <span class="n">table</span><span 
class="p">[</span><span class="s1">&#39;tableReference&#39;</span><span 
class="p">][</span><span class="s1">&#39;tableId&#39;</span><span 
class="p">]</span> <span class="o">==</span> <span 
class="n">table_id</span><span class="p">:</span>
+                    <span class="c1"># found the table, do update</span>
+                    <span class="bp">self</span><span class="o">.</span><span 
class="n">log</span><span class="o">.</span><span class="n">info</span><span 
class="p">(</span>
+                        <span class="s1">&#39;Table </span><span 
class="si">%s</span><span class="s1">:</span><span class="si">%s</span><span 
class="s1">.</span><span class="si">%s</span><span class="s1"> exists, 
updating.&#39;</span><span class="p">,</span>
+                        <span class="n">project_id</span><span 
class="p">,</span> <span class="n">dataset_id</span><span class="p">,</span> 
<span class="n">table_id</span>
+                    <span class="p">)</span>
+                    <span class="k">return</span> <span 
class="bp">self</span><span class="o">.</span><span 
class="n">service</span><span class="o">.</span><span 
class="n">tables</span><span class="p">()</span><span class="o">.</span><span 
class="n">update</span><span class="p">(</span><span 
class="n">projectId</span><span class="o">=</span><span 
class="n">project_id</span><span class="p">,</span>
+                                                        <span 
class="n">datasetId</span><span class="o">=</span><span 
class="n">dataset_id</span><span class="p">,</span>
+                                                        <span 
class="n">tableId</span><span class="o">=</span><span 
class="n">table_id</span><span class="p">,</span>
+                                                        <span 
class="n">body</span><span class="o">=</span><span 
class="n">table_resource</span><span class="p">)</span><span 
class="o">.</span><span class="n">execute</span><span class="p">()</span>
+            <span class="c1"># If there is a next page, we need to check the 
next page.</span>
+            <span class="k">if</span> <span 
class="s1">&#39;nextPageToken&#39;</span> <span class="ow">in</span> <span 
class="n">tables_list_resp</span><span class="p">:</span>
+ 

<TRUNCATED>

Reply via email to