<?xml version="1.0" encoding="utf-8"?>
<!-- Reference: Source code -->
<ApiConfig Name="Google BigQuery" Desc="Read and write Google BigQuery data effortlessly. Query, integrate, and manage datasets, tables, and jobs — almost no coding required."
Slug="google-bigquery-connector" Id="581fed19-75c7-4b95-9374-5035954e730f"
Logo="data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAJYAAACWCAYAAAA8AXHiAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAABeRSURBVHhe7Z1pcFNXlsfpSU1PaqqmM12Vmur5MOmqqekPXTVTna6ZhBgv2mwCxiFsDgWBsKQDgSyEsHgBW7YWbxgMGIwdzOo4BLMYAgQSEiBpQiAQSCCBYLwveF8ka5fsM/c8XceLruRNy3vy+1f9CmO996R3z1/nnHvfkzxJlChRokSJEiVKlChRokSJEiVKlChR3tMHX9qOvP8hYp+wqEttR45dd6ylQyJqLProqmVmyknH+fm7HBCm7gGJpgekWhEcC2laDyhPOGBZvnURHS5Rw2nJgaoni646IFztAEV6j4gH0GjTsnpg2znb/9HhE+VOkRm2Khl5R7IGUoRNJGFyimM+HUJRAxVbAk+EptrL5WkOEBk9Eo0D0k7bywHgCTqkomIL4KlwFXvAREYOmisizfYcHVZRiUdt7ayBEhk92JsmHrNPp0M7cbXqoPW1CDLjYw2SyNiI2Wr7nA7vxFW4ynaRNTgiYydCTWaK5+2FdIgnno5+a2+MIH0Ba3BExsereQ6gwzyxpDpu+ZNUY7ewBkVk/Ei1Dli133aEDvfE0dzt1iwZY0BEvAdp5HWzt3T/Gx3y4NfZ27bkUG55wS7iQ2RaO2hLrROnJK4+YG9hDYSI98H1wdQT5tV06INXiUdt70o07EEQ8Q3RW+xX6fAHp1YUwD+Gpjrus05exHeEq+1w+O/2GzQMwafTt+wmKan7rJMX8S2zcmw9BZf1T9NQBI92f26ThKlszJMW8T34hl5fbDtNwxE8Iu+YvXiCijSbSIAITbXBvF3m/6IhEb5e3dH2uzCVaCo+oDppD57lh9f2WNtZJynifyLUVth5wZpGQyNcbT1nS5Bp2ScpEhimptt+ouERppYX9v5LSIrVzDo5kcAh1dhgZrbhrzRMwtOUVLMMTyRShHfItbbe4zdNoTRUwtHRa9ZX5For86REAg++4d8rcpyj4RKOYraYTzlPAs0lwkfCUy3w1n7r/9CQ8V+xSvithMw+WCcjwi9Cks05NGz81lf3e/99To7FoGCchAj/kJF2pfiqZR8NH3+VVmpVK7jeSkQoSDWWprgSeIqGkH9SlsK/hpG6zXrxIvwFJ1nTt5jlNIz8k0xrfkcsgcIEV+Q/v+dYTEPJH52/Y1sZlS7OAoUKti8rCi2XaTj5IQD4TXSG6aZCi2VQRKhEqCzw/kdW/qzI4xd6yDTsFysiLF5QWi7QsAZeM7IwnbJfqIiwwKpz9ntbKQ1t4JTwsSVXLIHBhURjg+0X4RkaYv9rx6e9v5OoTcwXJyJsFGrTEhpm/2t6hqVAzFbBCw2z/zUtM/h7K3zjDIW1XbCB57k4z/w9DbV/9Mkt+OeYLFOLQmsmLyI4kJNzkWvMoCA/R2eaYd52CyzNt8C7hyywvtjKsYb8vLzAAq/ssEB0lhmi0p374L6sYwodGTm39FN+vCEwSmN4FgPAejFCQ6q2QGiKBd7cZ4HTt+zQ0NELRiuA1Q7g6AHo6QXopeDP+Dt8zES2adX3wvkf7LDuQ7zeZoZwFfs5hExEqqmeht33mrUV0yX7hQgBfFNI1GZ4+6AFLvxoh8bOXughhhmr0HTt3b1w5b4DEj62cu90Obeux35+oXHoa+snNPS+05qD5iPOPoP9IvgMlm4sX2sOW+FWZQ9nCF/oYWMPKI9Z4cUMS3CUSa0Jjlyx/JlawPs6cBmeFHJf9TLJtHeqx5GaRqmq1l5YsAt7FeFnL0mqOYXawPuavc38jRCNJSWBLbhkB73JRynKgyykHzt508F8XUJiKsn01Abe1cI8+P20DPaT8hV8E8zOMcNXDxw0zIHTD9UOWJQn3NKIY7kg11R1mVQtaonx6+KtjqdmbcUVdvaT
8hEcCCx91aQcjVQ469OZeqCqxQ6f3THCjrNdEFfUBm990AqrC1ph4+E22PZJF5y5aYSKJjt0Gnq4WeJI1W7A0ihcc8lJb516zDaZ2mL8mrvdtEihQWMJAwVpNueQTNVGZmoj1fUyC7y3rwUUyscg2dwAEZvqIXwT/suAPi5Pfgxv7GmBiz8YRzwRMNsA3jpgIX0X+7XzHZz8UFuMX/O2o2PZT8RHFu22wOPO4SNtsfVC6Q0DLN7RwhkmLLGBM9VoQPOFJtbDvKwmOHxFT/q44VNYp7EX3j6IN9exXz+fkWuMQKrX+P+c3bIPLEfkAhqAaRkmuN/gObiYXcoe2+CNvBZ4Ia6eaZixEBLfAPOzm+D7CuuwJRKNPzNbWG/YPqIzTXDmtvUv1CJj07QMYy/r4HwkQmWGG+XDN+onvzXAlATvGWook4lZsT8bTvUdPSQDGJjnwnciUg1j/w75pfkmHddbYcbiOTK1CVQnrWD34CvMVGnHO7nSxTKEN8Gy+t7+NtJTuS/J+Hr2f9kNUlU385z4zIukMlCbjE5v7jeF4wVW1kH5BvYq83daPJYfDGLK0Q4ITWAbwRegubDcGizuzYV6s6CNvDGEZS4c89idVh21y8g1fweayigIwlNNcPa2nYbJVXgdMO1EJwl0HQk4Ziv/EU5mj6s/aAOb3b25bpZb4IX4eojEssg4P76C1ez1fOMcapnhNT29ew2WFtbB+MiKQs/Z6tz3JphCAscKvD8II6V3zwX3PRdm06Qj7dy2il8zF/tc+cbM7FGUxNjtwjkxzFaXfnLfWJU32iBSyQ64P8F1r2sPLfRVueqXehuZVdaBNOkxMZeeea58BNfjFucZM6h13GvtIfM9hYZ9ED4yPcMI3Wb3Zeb9A21cUFnB9jcLc5q47MSSzdEL87c2ca9VmtRAyoxwymJMlgmuPrQ+Sy3kqvm7Tf9B6ryDtTMfkZE3wMGv3fdWd6utXI/DCnIgwB7vzE0DfXWuuvKTCcK5GasToZgLE9GsrYZ71EauWrUf73Vm78xHotKM0KJnN1eYAZbsdGaAgcEdKeGb6mBVQQvX+wxkAck6YzWr8zJQPXc9kiXsE2O0eEnJuT2XubiyyP/WZCqJxcJdpj9SK/Ur4ahtlUQlrFnJK2TmircJs1TXZh9XtpqSUAdX75vo0fqV+2kXhJLHWPuMhBAyibhRZqZHc9U7ha2D3gzOngsbevYY8Iml+Yw/Ybd4t7BMJVcbIOmYlYbDVfu/0I2rt0Jjff2z942FKD9up0dz1f4vddxzD9qHZC4hLEXISExeL7CspZaaNGldsTUTfxmpFQ6YXU9+x05XWFKcjTAGaGx4NlYtc5+RMk3VAEY3i6bfPTLDC3Gux5cm4VJE32yRPSZ8YNHuAcsPMzIND1gb8RnMWGWN7F4FyyBO3YcGZzT40liTiXFuV7CXHtpIz4h9GGu//p6LPSZ8AC9PqU5Y5nHGis5kb8RnpmeYwGhlv+uvPzTD8xvHF3xfGgsz0vFr3fSIg4U3G87OwAaevS+XuTTYc7HHhQ/Itaazk87/YK92TmuFxdwc16D36dwtA0yJ56+xwhNrYefZTnpEVy3f1czcrw8pgc/mWphrME1aWYhTWfYGfObVXe6NVXRFT4LHDgqyck8zvJk/mBkavHDcv40vjYW9n6rEfQO/Zl8Lc7+B9JdF/vVcL2cbYVIkz9OqO5blu5+y771IjMXNCF0DEqmsYy5RbC5up7NI53a+NBaSUNRKj+iqDYdamfsMha8919Q0AzWWAFm6x33GKrzYxZUb6WZX0Fh6xiUgDHTEpv7t0DwsY+36tBPCyGMDjzkWPBlr46EW5j7uiNQQczHGKFBEabth0qyt7Af5ziIPpfDjv2MpdBMEHxhLQnghroZryvupAcmA4w0En0d7vIMe0VXv7G1m7ucOWRLpudRdzozBGCt/M4NMBiftu2xxNoICY/Y295+MwY9uhZDAsoLgyVgDjTAaY72YWgcX7hjg
ix+Ng5if3TBouz7Q9AWfub+NBi9FsfbzhIxb59Ixx8rfLNhJjIWKYjzId6alG6DLwF7Hul1pgckbRm+ssWasGWrs21yPuSy3cdB2fWBGO3uTvdxgcwDEaOqZ+w0LyVx8KIsRKfqznLEW5JKZIWMDPoMLcXdr2cZq1fW47YP4YCzMpr/Usy9HNXT0DHodoyapNqCZS67uhs1HTU5jLcztPstd6GRsyFckqXoovmqj4RgsLJEr9zQxexxflMLRGmtu5mPu7guWLt8zuS3jI8VZFknPxRg3XzM9g5bBPk1LZ2/IV/Cdsa7IfZ91/rbBrbG6GdfpfJGxlu50NRa+ptxz7vurnec6yXOPz1gINvSRGv9nLoXWuIpayimZqnttlFZPporCYfZWA1jYSQva9A6QbsIADUZCfnenyvU6XUJR3xTfuV1ofI1bY4UmVP+6HRKtcjVWRZONzBb7t+nj+Q3V8MBNGUQt57Kc635jIqkGIklZZI2dLwhN7oJY5U+/pZZyqviK5U8vphmYO/CXbqhsZt/vjtfc4g61kBnY4MFGY83U1kOXcXB/hsZyZiznduMxFn4i59Vtj7nnGrhdBPl/bFY9M7uhTNZemJqCl2wG7zceZFzP1ckYO++Ck4Z1h41vUzsNVrS644+snfiKgpyMptT9BxQaO3uYgcJMgkYyWPrNhf93lk7nNmM1Fn4fhLoE77Pvf3wgP9e6z1YfcpeiBh/bG3BlkfRcrDH0BpGEl7IMncWX9U9TK7kqPKXrHmtnvkKmttCiY2cA1NbT7S5ZCwlLqILUo21gx9RG5I2MhfeB7TjTwR174ON9vLO3kR7FVXh/VlQK9mCu+3kFkrl8VRZlKt3wHwHTlBqelfuxLo8XmVoPx751nwXw/vJ5WbhQ6TrYmFVWFTRxpWtTcV/z7nxsNMbqa97XH0Rz9v9+ILiI+rjD/cfUvntkIcdl7+s1uLLo/cwlU3cVUft41sq9hmtYM1kH4SOv7DAQA7nPWvhpHXdZBLNZLDHewq0Ng7LFaIylUNbCArI/KzMiIeRYuDLvTmjK5bmPmft6G2dZ9F7PJU3VQe5584vUOsML+xfWgfiITKWHvItupodUF+4YQcYYaHeMxlieQLN6unyDOnWjG0LivN9buQMbem+URUw+uz+3PKKWGZkitZ1zcB0kSiDEZOo9fnAVdfSqHkI2VpLBxSB6JmJTNSR91AaX7ppIxrPAvRoLfPmjEd7+AO+lZ+8zlND4Kth+poubobqTjfRls9NxgsE+hi/5taFnjOdICEnq6p62o+yfqGVGpl2X4Q9kmmpmHZCP4GWMpXuMbj+k0Ce8uxSNIRmBOSISq0gJxX8H/8zadihYeg9d6nK7gIuyO3oh/jAuiWCZZh/Hl8g2Y+bqYI7ncMhJw77usG4ZtcvodP5H+xEhZS0018ErVo8ZAoVT/vnZ9SM2yWjAY8ZoauHSPdcyOlQP6m2cWVnH8RfOskh6LsZ4emLBTsP4/hTKlKTOI6wD85UwZRdcvOvh29cGKOeTDpi8gZRGzF5eAI+VQDLQSIVfr5RY1EJKJvt4/iRKQ8oiYzxZRKR0wRc/2z+kFhmb1h42/EWSwn4CvhKdoYdrZcObCxNbVbMdtpR2gCKZzN6w/2IMuifQFBFkNpj0UQs8qBv+u0eHCr+BMPNEG0yJ857BxwRp6keauaZqu7KoPcanNYeMn7GegK9g+Z6eroPvKkaWuVAtOgccv6aH9QeaYE5GHchJmcA+CRvwgYQRI+Hscqa2Dt4tbILDl3VQ0+r+i0lGIlxHyyDmwuMzg+4nnD2XZ3OFKXWQesL4HLXG+PT1L/ZZmP5YT8RnsCxe+sk+bM/FUouuB779xcyZDT+uv49Q8o0evnlggsftIzfsSIWZC8tiOK63MYLuN8ibxl1ZxB523yVLFbWFdzQ3RzeZW7XlnlQ44GvOPmPkyl4ghUsdlc2e19qwjGpKWmlZRIMFDudscfBYhio7bsfGwhPUEt6R8gA8
Ganuqhr6ZEJAruqEFXktZDboObC+EH7cf8PBJpKJKmFeJl7W8Vw2sSxmlfb1XOyg+wMs95GqfnPJVF2wel/3AmoH7+rYDXsuPsHAoAmFqVodSfO1kH+hE1pJP+XrDIa35ZR8000CVAUSXKciwcK1sMjkajJh8GxwvLs05UgrRHBlMXBw5uJ6ri54bbe+gtrANwpJak8ZGjQhgV+TLU0sh8KLOjC7+e6H8Qg/DHHqejcokqq5LMUK2FRlNTS0e85cuBSBM00+lMXQpA44dt2WSC3gG63aq/9zeIrTxYKE9FyypAYy46uEaBXel9UMX94dfsXek7B8XSONvupoK7ycVksMVQESRoD6wAwWm1UL9cOYC1fms04Gtizia1UkV26g4fet1hd1fxLJCpqAwA8c4MBhecL1q2hVLemFmqH4qy4y8zNztw9jVmnvdnC33uAf0cSfsUfCv8OD38qHpW5zcQvM1NZCaFy589LMkMC4A5/3JU0NVLcMUxaJadOOt3Hbs47ja8LiK41xhxqeoaH3rU58Z/3vqWmdPayACQcdl7mGDiQGcMrGR/Dc+grOcLi2hNkHwdIWsoE8tq6c+1eSOL5MwmUDUjKxwfckOymLK/Iaf+3V/AUufWSfar5Lw+4fzc02/FVGZltRGgGDZTEZMxcaJDCgOeek1w5rrsyT7RAeX8E8hq9Ysr1++DtDfaHo9K7vmQETGIE2VwQx19wM9z0XLsrKkzCLsPf3BaQEwgxV5Uwaav9q1+e2xIgUXOtgB0w4YOaqYw6wv8DMpSDmGdpzoalC/ZypkCU51eU0zIFR9lkzI1DCJOBlkTBDXfNr5rr+0ASyzextfUkImYXmnGkNTLbq0+IC0zPhymDIWgQ1miuwmQvLIs4W39/fCFHJOGlgb+cr8Pm3nW4NTG81VOuKuku5D0WygiVAAm2uQCLbXNWkVMI/0NAGViUAT8zf0dXDCpIwCfxsMRDgwm5K8eMzNKz80OEr+gvSVCyJwYPTXNg4TwxW5jXwowQOVXRGx2VWgISMLLmWGYRgY0pcBczQ1oTTUPJLmaWmxdJULCXsIAmTvp6LHZBgQJJYAQu2VHv3Bj5vK+dTMyi4m8SCi2Aui5Fk9qksbvpPGkJ+Kjaj46kIZXsTKziChrxZgjFzYcMef7DhPg0fv7Vqb/c+BXf3YbCBZTG4ei755opqGjZh6G8F3YzABAPB03OFxVfAun0NW2jIhKHt581bJNx1xCCEK4vYc5XTIOG/QqMCXs+t5efywnB6KaPtXJSmnQQjOFEk1TACJgzCEx7BO/l1MhoqYWn9h/pQaUpHLysowUEHyLmeix08voLLC0tyaitpmISpzFMmY6S6jRGU4EEhIHPJEh9BtKpSmCVwqF7JwUVTdlCCBaFkLiyBq/bUltDQCFtLdnWmKVTsgAQPHYLouWanVQVHturT63mdMFUT3CUR4XNZjIh/BCv31LK/i12oSj5qWCJRtnHmCmaiSD+JX4mEvQyfkBKWba8JrmzVp5j09tM48KyABBNRBPxYFyvAgSIqqRxWZDe4/4J/IWtFgf7p6PS2nqloriCHK4s8MZeENOwxmqqvaBiCU+8d6LgRmcoORrDBlUUemCs6tSI4S+BQLdxBGvkhQQhWAm2ucNKwL95anUyHPrgVk9mxOkrVygxEMBLIshijmiDZqk+LcruYQQhWApG5pKS3WlNYH9jPB/pb7x40/u80DWatiUMUwWmuMp8jTSiDBdmVjXS4J5b+lq9rjORK4gSCW4qoYprBm0Qll0+sEjhQJSXwRHQ6Y/CDHT+YS574aD8d5omp5fm6wshUxuBPAHxVFmNUlXD2huFZOsQTV87lB/bgBzvezlyS+DKYpS2fQ4d2Yuu1PN10eUoLc+CDHi+WRWzYY9RVDXRYRaEUqa3PyVIYAz9B8EZZfP79X65PAvgNHVJRfZIpWxe+yC1BYPaaeDjN9XDURMQ/hNCNZefoMIpiSX28a/6UTc23I1XswQ9q
yJvKWRbZBhqKNOEhKDaXwaJtNbfp8InyJPx+puPXdOXcgmJKMzsIQcxIzDV5/UN4K78OZqTX/L6kpMS7f99mIiiuqGPRlM2ti7Qnu+GtvR2wsmACkN8Ob+xuhKXbqwextrAeUj9uuilPKFukLGn+Ax0iUaJEiRIlSpQoUaJEiRIlSpQoUaJEjUmTJv0/jN5iAMBRgJYAAAAASUVORK5CYII="
           TrustedDomains="cloudresourcemanager.googleapis.com"
  Version="14"
  EngineVersion="20"
  >

  <VersionHistory>
     <Change Ver="14" Date="2025-11-18" Type="New">Updated examples for DateTime / Unix Timestamp values.</Change>
	 <Change Ver="14" Date="2025-11-18" Type="Fix">Fix to allow String DateTime values for Date field.</Change>
	 
     <Change Ver="13" Date="2025-03-04" Type="New">Expose timestamp column as DateTime (Alternate way to insert data in TIMESTAMP column).</Change>
     <Change Ver="13" Date="2025-03-04" Type="New">Added support for JSON Key file in Service Account authentication.</Change> 	  
     <Change Ver="12" Date="2025-01-24" Type="New">Added ImpersonateAs property for Service Account authentication (Requires Domain-wide delegation ON and needs Google Workspace account)</Change>      
     <Change Ver="12" Date="2025-01-24" Type="New">Added images in User Account and Service Account authentication instructions</Change>      
	 <Change Ver="12" Date="2025-01-21" Type="New">Updated MaxRequestSize to 10MB and Engine updated to calculate correct row size to avoid error 413 Request Entity Too Large in insertAll API call (Bulk Insert max 10000 rows or max 10MB size)</Change>
	 <Change Date="2024-06-05" Type="New">Added DefaultValue for Sql property so the user can preview a sample query without changing anything</Change>
	 <Change Date="2024-06-05" Type="New">Output BIGNUMERIC, BIGDECIMAL as string to avoid loss of data in large precision / scale</Change>
     <Change Date="2024-06-05" Type="New">Added Redirect URL property to support Web Application credentials (visible when show advanced option checked)</Change>
     <Change Date="2024-06-02" Type="Fix">read_table_data endpoint throws Invalid dataset ID error</Change>
	 <Change Date="2024-06-02" Type="New">Added support for TIMESTAMP columns to display as extra columns with DateTime along with original column</Change>
	 <Change Date="2024-01-02" Type="New">Added Project / Dataset Dropdown for easy selection on connection screen</Change>
     <Change Date="2023-04-11" Type="Fix">Location option doesn't work because the child endpoint in the get_query endpoint didn't pass the same location in the URL</Change>
     <Change Date="2023-03-17" Type="New">Added Job Location option for Query / Connection Level</Change>
     <Change Date="2023-01-05" Type="Fix">Duplicate rows returned in some cases</Change>
     <Change Date="2023-01-05" Type="New">Add support to call stored procedure without causing duplicate API calls (CRUD Operation)</Change>
     <Change Date="2023-01-05" Type="New">Add request caching logic so only one job request is fired</Change>
     <Change Date="2022-10-13" Type="Modified">Added retry logic on error code 429 (Too many requests).</Change>
     <Change Date="2021-10-26" Type="New">Added MaxRequestSize to auto adjust Request to avoid going over 10MB data limit for insert operation</Change>
     <Change Date="2020-05-15" Type="New">Added support for Service Account Credentials (Using P12 key file)</Change>
     <Change Date="2020-05-15" Type="Fix">UseCustomApp option doesn't work</Change>
     <Change Date="2020-01-27" Type="Fix">DateTime type is not detected correctly</Change>
     <Change Date="2020-01-12" Type="Modified">Added 30 Seconds Cache for get_query endpoint so same query is not executed twice (one for Metadata, one for Data)</Change>
     <Change Date="2020-01-12">Initial version</Change>
  </VersionHistory>

  <ServiceUrls>
    <ServiceUrl Name="https://www.googleapis.com/bigquery/v2" Url="https://www.googleapis.com/bigquery/v2" />
  </ServiceUrls>
  <Auths>
    <Auth Name="OAuth" 
          Label="User Account"
          Desc="User accounts represent a developer, administrator, or any other person who interacts with Google APIs and services. User accounts are managed as Google Accounts, either with Google Workspace or Cloud Identity. They can also be user accounts that are managed by a third-party identity provider and federated with Workforce Identity Federation."
          HelpLink="https://cloud.google.com/docs/authentication#user-accounts" 
          Type="OAuth"
          ConnStr="Provider=GoogleBigQuery;"
          TestEndPoint="list_projects">
      <Params>
        <Param Name="UseCustomApp" Value="True" Required="True" Hidden="False" Desc="Use your own app credentials or inbuilt app provided by ZappySys for ease of use. If you choose UseCustomApp=true then make sure to obtain your own ClientId and Secret using steps provided (Click [Steps to Configure] link found next to Authentication Type dropdown)" />
        <Param Name="ClientId" VisibleIf="UseCustomApp" RequiredIf="UseCustomApp" />
        <Param Name="ClientSecret" Secret="True" VisibleIf="UseCustomApp" RequiredIf="UseCustomApp" />
        <Param Name="Scope" Value="https://www.googleapis.com/auth/bigquery https://www.googleapis.com/auth/bigquery.insertdata https://www.googleapis.com/auth/cloud-platform https://www.googleapis.com/auth/cloud-platform.read-only https://www.googleapis.com/auth/devstorage.full_control https://www.googleapis.com/auth/devstorage.read_only https://www.googleapis.com/auth/devstorage.read_write" />

		<Param Name="ProjectId" Label="&#009;ProjectId (Choose after [Generate Token] clicked)" Value=""
			   OptionsEndPoint="list_projects" 
			   Example="bigquery-public-data"
			   OptionsEndPointValueColumn="projectId" 
               Required="True" Desc="Login to https://console.cloud.google.com/bigquery and choose Project dropdown at the top to see list of Projects. Over there you will find ProjectID next to ProjectName. You need to get ProjectID which has BigQuery API support enabled."
               />

        <Param Name="DatasetId" Label="DatasetId (Choose after [Generate Token] clicked and ProjectId selected)" Required="True" 
			OptionsEndPoint="list_datasets" 
			OptionsEndPointValueColumn="datasetId" 
			Example="samples"
			OptionsEndPointParameters="ProjectId=[$ProjectId$]" Value="" Desc="Default Dataset Name you like to use when listing tables (e.g. MyDataset)." />

        <Param Name="RetryMode" Value="RetryWhenStatusCodeMatch" Options="None;RetryAny;RetryWhenStatusCodeMatch;" Hidden="True" />
        <Param Name="RetryStatusCodeList" Value="429|503" Hidden="True" />
        <Param Name="RetryCountMax" Value="5" Hidden="True" />
        <Param Name="RetryMultiplyWaitTime" Value="True" Hidden="True" />
        <Param Name="Location" Template="LocationParam" />
        <Param Name="ReturnUrl" Label="Redirect URL (Only for Web App)" Options=";https://zappysys.com/oauth/" VisibleIf="UseCustomApp" Desc="Only specify this if you have created Credential as Web Application rather than Desktop. In Desktop App you don't have to supply Return URL (it's always localhost). When you keep this blank it uses the default value http://localhost:[some_random_port_each_time] for redirect_url"  />
      </Params>
                  <Notes>
                <![CDATA[
<p>
    Follow these steps on how to create Client Credentials (User Account principle) to authenticate and access BigQuery API in SSIS package or ODBC data source:        
</p>
<div class="alert alert-warning">
    <strong>WARNING</strong>: 
    
    If you are planning to automate processes, we recommend that you use a <em>Service Account</em> authentication method.    
    In case you still need to use <em>User Account</em>, then make sure you use a system/generic account (e.g. <code>automation@my-company.com</code>).
    When you use a personal account which is tied to a specific employee profile and that employee leaves the company, 
    the token may become invalid and any automated processes using that token will start to fail.   
</div>    

<h3>Step-1: Create project</h3>
<p>
    This step is optional if you already have a project in Google Cloud that you can use.
    If you don't have one, proceed with these simple steps to create it:
</p>
<ol>
     <li>
          <p>
             First of all, go to <a target="_blank" href="https://console.developers.google.com/">Google API Console</a>.
          </p>
     </li>
	<li>
          <p>
             Then click <strong>Select a project</strong> button and then click <strong>NEW PROJECT</strong> button:
          </p>
		<img src="https://cdn.zappysys.com/api/Images/authentication/google/start-creating-new-project-in-google-cloud.png"
		     loading="lazy"
		     decoding="async"
		     class="img-thumbnail block"
		     alt="Start creating a new project in Google Cloud"
		     title="Starting to create a new project in Google Cloud"
		     width="1000"
		     height="340" />
	</li>
	<li>
          <p>
             Name your project and click <strong>CREATE</strong> button:
          </p>
		<img src="https://cdn.zappysys.com/api/Images/authentication/google/create-new-project-in-google-cloud.png"
		     loading="lazy"
		     decoding="async"
		     class="img-thumbnail block"
		     alt="Create a new project in Google Cloud"
		     title="Creating a new project in Google Cloud"
		     width="680"
		     height="360" />
	</li>
	<li>
          <p>
             Wait until the project is created:
          </p>          
		<img src="https://cdn.zappysys.com/api/Images/authentication/google/wait-until-project-is-created-in-google-cloud.png"
		     loading="lazy"
		     decoding="async"
		     class="img-thumbnail block"
		     alt="Wait until project is created in Google Cloud"
		     title="Waiting until project is created in Google Cloud"
		     width="640"
		     height="390" />
	</li>
     <li>
        Done! Let's proceed to the next step.
     </li>
</ol>



<h3>Step-2: Enable Google Cloud APIs</h3>
<p>
    In this step we will enable BigQuery API and Cloud Resource Manager API:
</p>
<ol>
	<li>
          <p>
             Select your project on the top bar:
          </p>          
		<img src="https://cdn.zappysys.com/api/Images/authentication/google/select-project-in-google-cloud.png"
		     loading="lazy"
		     decoding="async"
		     class="img-thumbnail block"
		     alt="Select project in Google Cloud"
		     title="Selecting project in Google Cloud"
		     width="1000"
		     height="400" />
	</li>
     <li>
          <p>
              Then click the <strong>"hamburger"</strong> icon on the top left and access 
              <a target="_blank" href="https://console.cloud.google.com/apis/dashboard">APIs & Services</a>:
          </p>
          <img src="https://cdn.zappysys.com/api/Images/authentication/google/access-apis-and-services-in-google-cloud.png"
               loading="lazy"
               decoding="async"
               class="img-thumbnail block"
               alt="Access APIs and services in Google Cloud"
               title="Accessing APIs and services in Google Cloud"
               width="990"
               height="440" />
    </li>
    <li>
        <p>
            Now let's enable several APIs by clicking <strong>ENABLE APIS AND SERVICES</strong> button:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/enable-api-for-project-in-google-cloud.png"
             loading="lazy"
             decoding="async"
             class="img-thumbnail block"
             alt="Enable API for project in Google Cloud"
             title="Enabling API for project in Google Cloud"
             width="800"
             height="300" />
    </li>
    <li>
        <p>
            In the search bar search for <code>bigquery api</code> and then locate and select <strong>BigQuery API</strong>:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/search-for-api-in-google-cloud.png"
             loading="lazy"
             decoding="async"
             class="img-thumbnail block"
             alt="Search for API in Google Cloud"
             title="Searching for API in Google Cloud"
             width="1000"
             height="390" />
    </li>
    <li>
        <p>
            If <strong>BigQuery API</strong> is not enabled, enable it:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/enable-google-bigquery-api.png"
             loading="lazy"
             decoding="async"
             class="img-thumbnail block"
             alt="Enable Google BigQuery API"
             title="Enabling Google BigQuery API"
             width="800"
             height="350" />
    </li>
    <li>
        <p>
            Then repeat the step and enable <strong>Cloud Resource Manager API</strong> as well:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/enable-cloud-resource-manager-api.png"
             loading="lazy"
             decoding="async"
             class="img-thumbnail block"
             alt="Enable Cloud Resource Manager API"
             title="Enabling Cloud Resource Manager API"
             width="800"
             height="350" />
    </li>
    <li>
        Done! Let's proceed to the next step.
    </li>    
</ol>



<h3>Step-3: Create OAuth application</h3>
<ol>
    <li>
        <p>
            First of all, click the <strong>"hamburger"</strong> icon on the top left and then hit <strong>VIEW ALL PRODUCTS</strong>:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/user-account/view-all-products-in-google-cloud.png"
             loading="lazy"
             decoding="async"
             class="img-thumbnail block"
             alt="View all products in Google Cloud"
             title="Viewing all products in Google Cloud"
             width="990"
             height="700" />
    </li>
    <li>
        <p>
            Then access 
            <a target="_blank" href="https://console.cloud.google.com/auth">Google Auth Platform</a>
            to start creating an OAuth application:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/user-account/open-google-auth-platform-in-google-cloud.png"
             loading="lazy"
             decoding="async"
             class="img-thumbnail block"
             alt="Open Google Auth Platform in Google Cloud"
             title="Opening Google Auth Platform in Google Cloud"
             width="1000"
             height="430" />
    </li>
    <li>
        <p>
            Start by pressing <strong>GET STARTED</strong> button:
        </p>        
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/user-account/start-creating-app-in-google-cloud.png"
             loading="lazy"
             decoding="async"
             class="img-thumbnail block"
             alt="Start creating an app in Google Cloud"
             title="Starting to create an app in Google Cloud"
             width="1000"
             height="340" />
    </li>    
    <li>
        <p>
            Next, continue by filling in <strong>App name</strong> and <strong>User support email</strong> fields:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/user-account/fill-app-info-in-google-cloud.png"
             loading="lazy"
             decoding="async"
             class="img-thumbnail block"
             alt="Fill app info in Google Cloud"
             title="Filling app info in Google Cloud"
             width="850"
             height="440" />
    </li>
    <li>
        <p>
            Choose <strong>Internal</strong> option, if it's enabled, otherwise select <strong>External</strong>:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/user-account/choose-app-audience-in-google-cloud.png"
             loading="lazy"
             decoding="async"
             class="img-thumbnail block"
             alt="Choose app audience in Google Cloud"
             title="Choosing app audience in Google Cloud"
             width="850"
             height="590" />
    </li>
    <li>
        <p>
            <em>Optional step if you used <code>Internal</code> option in the previous step</em>. 
            Nevertheless, if you had to use <code>External</code> option, then click <strong>ADD USERS</strong> to add a user:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/user-account/add-test-user-in-google-cloud-app.png"
             loading="lazy"
             decoding="async"
             class="img-thumbnail block"
             alt="Add test user in Google Cloud app"
             title="Adding test user in Google Cloud app"
             width="1000"
             height="990" />
    </li>
    <li>
        <p>
            Then add your contact <strong>Email address</strong>:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/user-account/enter-app-contact-info-in-google-cloud.png"
             loading="lazy"
             decoding="async"
             class="img-thumbnail block"
             alt="Enter app contact info in Google Cloud"
             title="Entering app contact info in Google Cloud"
             width="850"
             height="510" />
    </li>
    <li>
        <p>
            Finally, check the checkbox and click <strong>CREATE</strong> button:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/user-account/create-app-in-google-cloud.png"
             loading="lazy"
             decoding="async"
             class="img-thumbnail block"
             alt="Create app in Google Cloud"
             title="Creating app in Google Cloud"
             width="850"
             height="580" />
    </li>
    <li>
        Done! Let's create Client Credentials in the next step.
    </li>
</ol>



<h3>Step-4: Create Client Credentials</h3>
<ol>
    <li>        
        <p>
            In 
            <a target="_blank" href="https://console.cloud.google.com/auth">Google Auth Platform</a>, 
            select <strong>Clients</strong> menu item and click <strong>CREATE CLIENT</strong> button:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/user-account/start-creating-app-client-in-google-cloud.png"
	       loading="lazy"
	       decoding="async"
	       class="img-thumbnail block"
	       alt="Start creating app client in Google Cloud"
	       title="Starting to create an app client in Google Cloud"
	       width="1000"
	       height="340" />
    </li>
    <li>
        <p>
            Choose <code>Desktop app</code> as <strong>Application type</strong> and name your credentials:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/user-account/create-oauth-app-client-in-google-cloud.png"
	       loading="lazy"
	       decoding="async"
	       class="img-thumbnail block"
	       alt="Create OAuth app client in Google Cloud"
	       title="Creating OAuth app client in Google Cloud"
	       width="1000"
	       height="500" />
    </li>
    <li>
        <p>
            Continue by opening the created credentials:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/user-account/view-app-client-credentials-in-google-cloud.png"
	       loading="lazy"
	       decoding="async"
	       class="img-thumbnail block"
	       alt="View app client credentials in Google Cloud"
	       title="Viewing app client credentials in Google Cloud"
	       width="1000"
	       height="370" />
    </li>
    <li>
        <p>
            Finally, copy <strong>Client ID</strong> and <strong>Client secret</strong> for the later step:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/user-account/use-client-id-and-secret-to-read-google-rest-api-data.png"
	       loading="lazy"
	       decoding="async"
	       class="img-thumbnail block"
	       alt="Use client ID and secret to read Google REST API data"
	       title="Using client ID and secret to read Google REST API data"
	       width="770"
	       height="360" />
    </li>
    <li>  
        <p>
            Done! We have all the data needed for authentication, let's proceed to the last step!
        </p>
    </li>    
</ol>




<h3>Step-5: Configure connection</h3>
<ol>
    <li>
        <p>
            Now go to SSIS package or ODBC data source and use previously copied values in <strong>User Account</strong> authentication configuration:
        </p>
	   <ul>
            <li>In the <strong>ClientId</strong> field paste the <strong>Client ID</strong> value.</li>
            <li>In the <strong>ClientSecret</strong> field paste the <strong>Client secret</strong> value.</li>            
        </ul>
    </li>
    <li>
        <p>
            Press <strong>Generate Token</strong> button to generate Access and Refresh Tokens.
        </p>
    </li>    
    <li>
        <p>
            Then choose <strong>ProjectId</strong> from the drop down menu.
        </p>
    </li>
    <li>
        <p>
            Continue by choosing <strong>DatasetId</strong> from the drop down menu.
        </p>
    </li>
    <li>
        <p>
            Finally, click <strong>Test Connection</strong> to confirm the connection is working.
        </p>
    </li>
    <li>
        <p>
            Done! Now you are ready to use Google BigQuery Connector!
        </p>
    </li>        
</ol>  
]]>
            </Notes>
    </Auth>

      <Auth Name="ServiceAccount" 
            Label="Service Account (Using *.json OR *.p12 key file)" 
            Desc="Service accounts are accounts that do not represent a human user. They provide a way to manage authentication and authorization when a human is not directly involved, such as when an application needs to access Google Cloud resources. Service accounts are managed by IAM."            
            HelpLink="https://cloud.google.com/docs/authentication#service-accounts"            
            Type="OAuth" 
            ConnStr="Provider=GoogleBigQuery;UseCustomApp=True;UseClientCertificate=True;CertificateStorageType=PfxFile;GrantType=ClientCredentials;CertificatePath=[$PrivateKeyPath$];"
            TestEndPoint="list_projects">
            <Params>
                <Param Name="ClientId" Label="Service Account Email" Desc="This is service account email ID (e.g. some_name@my_project.iam.gserviceaccount.com)" Required="True" />
                <Param Name="PrivateKeyPath" Label="Service Account Private Key Path (i.e. *.json OR *.p12)" Desc="File path for JSON Key or p12 file (i.e. Private Key file for service account). Keep this key file secure" Required="True" />
                <Param Name="Scope" Desc="Permission(s) you like to request" Value="https://www.googleapis.com/auth/bigquery https://www.googleapis.com/auth/bigquery.insertdata https://www.googleapis.com/auth/cloud-platform https://www.googleapis.com/auth/cloud-platform.read-only https://www.googleapis.com/auth/devstorage.full_control https://www.googleapis.com/auth/devstorage.read_only https://www.googleapis.com/auth/devstorage.read_write" />

                <Param Name="ProjectId" Label="&#009;ProjectId" Value=""
		                  OptionsEndPoint="list_projects" 
		                  OptionsEndPointValueColumn="projectId" 
                        Required="True" Desc="Login to https://console.cloud.google.com/bigquery and choose Project dropdown at the top to see list of Projects. Over there you will find ProjectID next to ProjectName. You need to get ProjectID which has BigQuery API support enabled."
                        />

                <Param Name="DatasetId" Label="DatasetId (Choose after ProjectId)" Required="True" 
	                   OptionsEndPoint="list_datasets" 
	                   OptionsEndPointValueColumn="datasetId" 
	                   OptionsEndPointParameters="ProjectId=[$ProjectId$]" Value="" Desc="Default Dataset Name you like to use when listing tables (e.g. MyDataset)." />

                <Param Name="RetryMode" Value="RetryWhenStatusCodeMatch" Options="None;RetryAny;RetryWhenStatusCodeMatch;" Hidden="True" />
                <Param Name="RetryStatusCodeList" Value="429" Hidden="True" />
                <Param Name="RetryCountMax" Value="5" Hidden="True" />
                <Param Name="RetryMultiplyWaitTime" Value="True" Hidden="True" />   
                <Param Name="Location" Template="LocationParam" />
                <Param Name="ImpersonateAs" Type="Property" Key="ExtraAttributesForTokenRequest" Hidden="False" ValueTemplate="user={$value$}" Label="Impersonate As (Enter Email Id)" />
            </Params>
            <Notes>
                <![CDATA[
    <p>
        Follow these steps to create a Service Account to authenticate and access the BigQuery API in an SSIS package or ODBC data source:
    </p>

<h3>Step-1: Create project</h3>
<p>
    This step is optional, if you already have a project in Google Cloud and can use it.
    However, if you don't, proceed with these simple steps to create one:
</p>
<ol>
     <li>
          <p>
             First of all, go to <a target="_blank" href="https://console.developers.google.com/">Google API Console</a>.
          </p>
     </li>
	<li>
          <p>
             Then click <strong>Select a project</strong> button and then click <strong>NEW PROJECT</strong> button:
          </p>
		<img src="https://cdn.zappysys.com/api/Images/authentication/google/start-creating-new-project-in-google-cloud.png"
		     loading="lazy"
		     decoding="async"
		     class="img-thumbnail block"
		     alt="Start creating a new project in Google Cloud"
		     title="Starting to create a new project in Google Cloud"
		     width="1000"
		     height="340" />
	</li>
	<li>
          <p>
             Name your project and click <strong>CREATE</strong> button:
          </p>
		<img src="https://cdn.zappysys.com/api/Images/authentication/google/create-new-project-in-google-cloud.png"
		     loading="lazy"
		     decoding="async"
		     class="img-thumbnail block"
		     alt="Create a new project in Google Cloud"
		     title="Creating a new project in Google Cloud"
		     width="680"
		     height="360" />
	</li>
	<li>
          <p>
             Wait until the project is created:
          </p>          
		<img src="https://cdn.zappysys.com/api/Images/authentication/google/wait-until-project-is-created-in-google-cloud.png"
		     loading="lazy"
		     decoding="async"
		     class="img-thumbnail block"
		     alt="Wait until project is created in Google Cloud"
		     title="Waiting until project is created in Google Cloud"
		     width="640"
		     height="390" />
	</li>
     <li>
        Done! Let's proceed to the next step.
     </li>
</ol>



<h3>Step-2: Enable Google Cloud APIs</h3>
<p>
    In this step we will enable BigQuery API and Cloud Resource Manager API:
</p>
<ol>
	<li>
          <p>
             Select your project on the top bar:
          </p>          
		<img src="https://cdn.zappysys.com/api/Images/authentication/google/select-project-in-google-cloud.png"
		     loading="lazy"
		     decoding="async"
		     class="img-thumbnail block"
		     alt="Select project in Google Cloud"
		     title="Selecting project in Google Cloud"
		     width="1000"
		     height="400" />
	</li>
     <li>
          <p>
              Then click the <strong>"hamburger"</strong> icon on the top left and access 
              <a target="_blank" href="https://console.cloud.google.com/apis/dashboard">APIs & Services</a>:
          </p>
          <img src="https://cdn.zappysys.com/api/Images/authentication/google/access-apis-and-services-in-google-cloud.png"
               loading="lazy"
               decoding="async"
               class="img-thumbnail block"
               alt="Access APIs and services in Google Cloud"
               title="Accessing APIs and services in Google Cloud"
               width="990"
               height="440" />
    </li>
    <li>
        <p>
            Now let's enable several APIs by clicking <strong>ENABLE APIS AND SERVICES</strong> button:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/enable-api-for-project-in-google-cloud.png"
             loading="lazy"
             decoding="async"
             class="img-thumbnail block"
             alt="Enable API for project in Google Cloud"
             title="Enabling API for project in Google Cloud"
             width="800"
             height="300" />
    </li>
    <li>
        <p>
            In the search bar search for <code>bigquery api</code> and then locate and select <strong>BigQuery API</strong>:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/search-for-api-in-google-cloud.png"
             loading="lazy"
             decoding="async"
             class="img-thumbnail block"
             alt="Search for API in Google Cloud"
             title="Searching for API in Google Cloud"
             width="1000"
             height="390" />
    </li>
    <li>
        <p>
            If <strong>BigQuery API</strong> is not enabled, enable it:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/enable-google-bigquery-api.png"
             loading="lazy"
             decoding="async"
             class="img-thumbnail block"
             alt="Enable Google BigQuery API"
             title="Enabling Google BigQuery API"
             width="800"
             height="350" />
    </li>
    <li>
        <p>
            Then repeat the step and enable <strong>Cloud Resource Manager API</strong> as well:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/enable-cloud-resource-manager-api.png"
             loading="lazy"
             decoding="async"
             class="img-thumbnail block"
             alt="Enable Cloud Resource Manager API"
             title="Enabling Cloud Resource Manager API"
             width="800"
             height="350" />
    </li>
    <li>
        Done! Let's proceed to the next step and create a service account.
    </li>    
</ol>



<h3>Step-3: Create Service Account</h3>
<p>
    Use the steps below to create a Service Account in Google Cloud:
</p>
<ol>    
    <li>
        <p>
            First of all, go to 
            <a target="_blank" href="https://console.cloud.google.com/iam-admin/iam">IAM & Admin</a> 
            in Google Cloud console:
        </p>
        
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/Service-Account/access-iam-and-admin-in-google-cloud.png" 
             loading="lazy" 
             decoding="async" 
             class="img-thumbnail block" 
             alt="Access IAM & Admin in Google Cloud" 
             title="Accessing IAM & Admin in Google Cloud" 
             width="690" 
             height="510" /> 
    </li>
    <li>        
        <p>
            Once you do that, click <strong>Service Accounts</strong> on the left side 
            and click <strong>CREATE SERVICE ACCOUNT</strong> button:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/Service-Account/start-creating-service-account-in-google-cloud.png" 
             loading="lazy" 
             decoding="async" 
             class="img-thumbnail block" 
             alt="Start creating service account in Google Cloud" 
             title="Starting to create a service account in Google Cloud" 
             width="1000" 
             height="500" /> 
    </li>
    <li>
        <p>
            Then name your service account and click <strong>CREATE AND CONTINUE</strong> button:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/Service-Account/create-service-account-in-google-cloud.png" 
             loading="lazy" 
             decoding="async" 
             class="img-thumbnail block" 
             alt="Create service account in Google Cloud" 
             title="Creating service account in Google Cloud" 
             width="750" 
             height="450" /> 
    </li>
    <li>
        <p>
            Continue by clicking <strong>Select a role</strong> dropdown 
            and start granting service account BigQuery Admin and Project Viewer roles:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/Service-Account/start-granting-service-account-project-roles-in-google-cloud.png" 
             loading="lazy" 
             decoding="async" 
             class="img-thumbnail block" 
             alt="Start granting service account project roles in Google Cloud" 
             title="Starting to grant project roles to the service account in Google Cloud" 
             width="750" 
             height="250" /> 
    </li>     
    <li>
        <p>
            Find <strong>BigQuery</strong> group on the left 
            and then click on <strong>BigQuery Admin</strong> role on the right:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/Service-Account/grant-service-account-bigquery-role.png" 
             loading="lazy" 
             decoding="async" 
             class="img-thumbnail block" 
             alt="Grant service account BigQuery Admin role" 
             title="Granting service account BigQuery Admin role" 
             width="540" 
             height="410" /> 
    </li>
    <li>
        <p>
            Then click <strong>ADD ANOTHER ROLE</strong> button, 
            find <strong>Project</strong> group 
            and select <strong>Viewer</strong> role:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/Service-Account/grant-service-account-project-viewer-role.png" 
             loading="lazy" 
             decoding="async" 
             class="img-thumbnail block" 
             alt="Grant service account project viewer role" 
             title="Granting service account project viewer role" 
             width="540" 
             height="410" />     
    </li>
    <li>
        <p>
            Finish adding roles by clicking <strong>CONTINUE</strong> button:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/Service-Account/finish-granting-service-account-project-roles-in-google-cloud.png" 
             loading="lazy" 
             decoding="async" 
             class="img-thumbnail block" 
             alt="Finish granting service account project roles in Google Cloud" 
             title="Finishing the granting of project roles to the service account in Google Cloud" 
             width="750" 
             height="370" />
            
        <div class="alert alert-info alert-title">
            You can always add or modify permissions later in 
            <a target="_blank" href="https://console.cloud.google.com/iam-admin/iam">IAM & Admin</a>.
        </div>
    </li>
    <li>
        <p>
            Finally, in the last step, just click button <strong>DONE</strong>:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/Service-Account/finish-configuring-service-account-in-google-cloud.png" 
             loading="lazy" 
             decoding="async" 
             class="img-thumbnail block" 
             alt="Finish configuring service account in Google Cloud" 
             title="Finishing the configuration of the service account in Google Cloud" 
             width="750" 
             height="350" /> 
    </li>
    <li>
        <p>
            Done! We are ready to add a Key to this service account in the next step.
        </p>
    </li>        
</ol>



<h3>Step-4: Add Key to Service Account</h3>
<p>
    We are ready to add a Key (JSON or P12 key file) to the created Service Account:
</p>
<ol>    
    <li>
        <p>
            In <a target="_blank" href="https://console.cloud.google.com/iam-admin/serviceaccounts">Service Accounts</a>
            open newly created service account:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/Service-Account/open-service-account-in-google-cloud.png" 
             loading="lazy" 
             decoding="async" 
             class="img-thumbnail block" 
             alt="Open service account in Google Cloud" 
             title="Opening service account in Google Cloud" 
             width="750" 
             height="350" /> 
    </li>    
    <li>
        <p>
            Next, copy email address of your service account for the later step:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/Service-Account/copy-service-account-email-address-in-google-cloud.png" 
             loading="lazy" 
             decoding="async" 
             class="img-thumbnail block" 
             alt="Copy service account email address in Google Cloud" 
             title="Copying service account email address in Google Cloud" 
             width="600" 
             height="370" /> 
    </li>
    <li>
        <p>
            Continue by selecting <strong>KEYS</strong> tab, 
            then press <strong>ADD KEY</strong> dropdown, 
            and click the <strong>Create new key</strong> menu item:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/Service-Account/start-creating-key-for-service-account-in-google-cloud.png" 
             loading="lazy" 
             decoding="async" 
             class="img-thumbnail block" 
             alt="Start creating key for service account in Google Cloud" 
             title="Starting to create a key for the service account in Google Cloud" 
             width="750" 
             height="530" /> 
    </li>
    <li>
        <p>
            Finally, select <strong>JSON</strong> (Engine v19+) or <strong>P12</strong> option and hit <strong>CREATE</strong> button:
        </p>
        <img src="https://cdn.zappysys.com/api/Images/authentication/google/Service-Account/create-p12-key-for-service-account-in-google-cloud.png" 
             loading="lazy" 
             decoding="async" 
             class="img-thumbnail block" 
             alt="Create JSON or P12 key for service account in Google Cloud" 
             title="Creating JSON or P12 key for service account in Google Cloud" 
             width="600" 
             height="400" /> 
    </li>    
    <li>
        The key file downloads to your machine. We now have all the data needed for authentication, so let's proceed to the last step!
    </li>
</ol>

<h3>Step-5: Configure connection</h3>
<ol>
    <li>
        <p>
            Now go to SSIS package or ODBC data source and configure these fields in <strong>Service Account</strong> authentication configuration:
        </p>
	   <ul>
            <li>In the <strong>Service Account Email</strong> field paste the service account <strong>Email address</strong> value you copied in the previous step.</li>        
            <li>In the <strong>Service Account Private Key Path (i.e. *.json OR *.p12)</strong> field use downloaded certificate's file path.</li>
       </ul>
    </li>       
    <li>
        Done! Now you are ready to use Google BigQuery Connector!
    </li>
</ol>

]]>
            </Notes>
      </Auth>

  </Auths>

  <Template>
    <!-- Shared pagination template: follows the nextPageToken attribute returned by
         Google "list" APIs and appends it to the next request as pageToken=... -->
    <EndPoint Name="Paginate">
      <Params>
        <Param Name="NextUrlAttributeOrExpr" Type="Property" Value="$.nextPageToken" />
        <Param Name="NextUrlSuffix" Type="Property" Value="pageToken=&lt;%nextlink%&gt;" />
      </Params>
    </EndPoint>
    <!-- Pagination template for row-returning endpoints: same mechanism but driven by
         the $.pageToken attribute of the response (used by job-result / tabledata reads) -->
    <EndPoint Name="PaginateRows">
      <Params>
        <Param Name="NextUrlAttributeOrExpr" Type="Property" Value="$.pageToken" Hidden="True"/>
        <Param Name="NextUrlSuffix" Type="Property" Value="pageToken=&lt;%nextlink%&gt;" Hidden="True"/>
      </Params>
    </EndPoint>
	
	<!-- Reusable "Job Location" parameter (referenced via Template="LocationParam").
	     Offers the BigQuery region list plus a free-form "type your own" entry. -->
	<Param Name="LocationParam" Value="" Label="Job Location" 
		Options="System Default=;Data centers in the United States=US;Data centers in the European Union=EU;Columbus, Ohio=us-east5;Iowa=us-central1;Las Vegas=us-west4;Los Angeles=us-west2;Montréal=northamerica-northeast1;Northern Virginia=us-east4;Oregon=us-west1;Salt Lake City=us-west3;São Paulo=southamerica-east1;Santiago=southamerica-west1;South Carolina=us-east1;Toronto=northamerica-northeast2;Delhi=asia-south2;Hong Kong=asia-east2;Jakarta=asia-southeast2;Melbourne=australia-southeast2;Mumbai=asia-south1;Osaka=asia-northeast2;Seoul=asia-northeast3;Singapore=asia-southeast1;Sydney=australia-southeast1;Taiwan=asia-east1;Tokyo=asia-northeast1;Belgium=europe-west1;Finland=europe-north1;Frankfurt=europe-west3;London=europe-west2;Madrid=europe-southwest1;Milan=europe-west8;Netherlands=europe-west4;Paris=europe-west9;Warsaw=europe-central2;Zürich=europe-west6;AWS - US East (N. Virginia)=aws-us-east-1;Azure - East US 2=azure-eastus2;Custom Name (Type your own)=type-region-id-here"
		Desc="The geographic location where the job should run. For Non-EU and Non-US datacenters we suggest you to supply this parameter to avoid any error." HelpLink="https://cloud.google.com/bigquery/docs/locations#specifying_your_location"/>
		
  </Template>

  <EndPoints>
    <!-- SQL endpoint: submits the statement via jobs.query (POST), extracts the jobId,
         then the nested child endpoints poll job status and page through the results. -->
    <EndPoint Name="get_query" IsSqlEndPoint="True" Label="Read Data using SQL Query -OR- Execute Script (i.e. CREATE, SELECT, INSERT, UPDATE, DELETE)"
              Group="Query"
              Desc="Runs a BigQuery SQL query synchronously and returns query results if the query completes within a specified timeout"
              HelpLink="https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query"
              Url="projects/[$ProjectId$]/queries" Method="POST" Filter="$.jobReference.jobId"
              ContentType="application/json" NoOutputColumns="True"
              MetaDetectionOrder="Static" MetaScanRows="10000" CachedTtl="30" UseRawCache="True" 
              OutputDefaultColumnIfNoMeta="True"
      >
      <Body><![CDATA[{"query":"[$Sql$]","maxResults":10,"timeoutMs":[$timeoutMs$],"useLegacySql":[$useLegacySql$] , "location" : "[$Location$]" }]]></Body>
      <Params>
        <Param Name="Sql" IsSqlParameter="True" Label="SQL Statement (i.e. SELECT / DROP / CREATE)" Editor="MultiLine" Required="True" Functions="JSONENC"
               Value="SELECT title,id,language,wp_namespace,reversion_id ,comment,num_characters FROM bigquery-public-data.samples.wikipedia LIMIT 1000"
			   DefaultValue="SELECT title,id,language,wp_namespace,reversion_id ,comment,num_characters FROM bigquery-public-data.samples.wikipedia LIMIT 1000"
               Options="Example1=SELECT title,id,language,wp_namespace,reversion_id ,comment,num_characters FROM bigquery-public-data.samples.wikipedia LIMIT 1000;Example2=CREATE TABLE TestDataset.Table1 (ID INT64,Name STRING,BirthDate DATETIME, Active BOOL);Example3=INSERT TestDataset.Table1 (ID, Name,BirthDate,Active) VALUES(1,'AA','2020-01-01',true),(2,'BB','2020-01-02',true),(3,'CC','2020-01-03',false)"
               />
        <Param Name="useLegacySql" Label="Use Legacy SQL Syntax?" Options="false;true" Value="false" />
        <!-- Fixed: numeric milliseconds value; removed Options="false;true" which was
             copied by mistake from the boolean useLegacySql parameter above -->
        <Param Name="timeoutMs" Value="90000" Label="timeout (Milliseconds)" Desc="Wait until timeout is reached." />
		<Param Name="Location" Template="LocationParam" />
      </Params>

      <!-- Poll job state every 4s (up to 7200s) until it leaves PENDING/RUNNING and reaches DONE -->
      <EndPoint Name="child_check_job_status" Url="projects/[$ProjectId$]/jobs/[$parent.jobId$]?location=[$Location$]" Method="GET" Filter="$.status.state--FAST" MaxRows="1" NoOutputColumns="True" >
        <Params>
          <Param Name="EnableStatusCheck" Type="Property" Value="true" />
          <Param Name="StatusCheckIterationWaitSeconds" Type="Property" Value="4" />
          <Param Name="StatusCheckMaxWaitSeconds" Type="Property" Value="7200" />
          <Param Name="StatusContinueValue" Type="Property" Value="PENDING|RUNNING" />
          <Param Name="StatusSuccessValue" Type="Property" Value="DONE" />
        </Params>
        <!-- Fetch finished-job rows; the two-dimensional f[*].v array is pivoted into
             columns named after $.schema.fields[*].name -->
        <EndPoint Name="child_get_job_result" Template="PaginateRows" Url="projects/[$ProjectId$]/queries/[$parent.[0].jobId$]?location=[$Location$]" Method="GET" Filter="$.rows[*]">
          <Params>
            <Param Name="EnableStatusCheck" Type="Property" Value="false" />
            <Param Name="ArrayTransformType" Type="Property" Value="TransformComplexTwoDimensionalArray"  />
            <Param Name="ArrayTransRowValueFilter" Type="Property" Value="$.f[*].v" />
            <Param Name="ArrayTransColumnNameFilter" Type="Property" Value="$.schema.fields[*].name"  />
          </Params>
        </EndPoint>
      </EndPoint>

      <OutputColumns>
        <!-- Columns discovered dynamically from the query schema endpoint -->
        <Column Name="-Dynamic-" Expand="True" 
		        DataEndPoint="get_query_schema"
                ColumnInfoMap="Name=name;DataType=type" 
				DataTypeMap="DT_I8=int,smallint,integer,bigint,tinyint,byteint;DT_WSTR=string,record;DT_DBTIMESTAMP=datetime;DT_DBDATE=date;DT_BOOL=bool,boolean;DT_R8=float,numeric,timestamp;DT_STR=bytes;DT_STR=bignumeric,bigdecimal" 
				/>
				
        <!-- Extra *_DT companion column for each TIMESTAMP field, converted from unix ms -->
        <Column Name="[$parent.name$]" Label="[$parent.name$]_DT" Expand="True" 
				DataEndPoint="get_query_schema"
				DataEndPointParameters="Filter=$.schema.fields[?(@type=='TIMESTAMP')];Sql=[$Sql$]"
                ColumnInfoMap="Name=name;DataType=type" 
				DataType="DT_DBTIMESTAMP" 
				Functions="FUN_UNIX_TIMESTAMP_MS_TO_DATE"
				/>
      </OutputColumns>

    </EndPoint>

    <!-- Reads table rows via tabledata.list. Desc corrected: the previous text was
         copy-pasted from tables.get and wrongly claimed no data is returned. -->
    <EndPoint  Name="read_table_data" Label="Read Table Rows" Group="Table" Desc="Lists the content of the specified table in rows. Rows are returned as a two-dimensional array and pivoted into columns using the table schema."
          HelpLink="https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/list"
          Url="projects/[$ProjectId$]/datasets/[$DatasetId$]/tables/[$TableId$]/data" Method="GET" Filter="$.rows[*]" MetaDetectionOrder="Static"
           Template="PaginateRows">
      <Params>
        <Param Name="ProjectId" Label="&#009;&#009;ProjectId" Value="" OptionsEndPoint="list_projects" OptionsEndPointValueColumn="projectId" OptionsEndPointLabelColumn="name" Desc="Leave this value blank to use ProjectId from connection settings" />
        <Param Name="DatasetId" Label="&#009;DatasetId" Desc="Leave this value blank to use DatasetId from connection settings" OptionsEndPoint="list_datasets" OptionsEndPointValueColumn="datasetId" OptionsEndPointLabelColumn="datasetId" />
        <Param Name="TableId" Label="&#009;TableId"  Required="True" OptionsEndPoint="list_tables" OptionsEndPointParameters="DatasetId=[$DatasetId$]" OptionsEndPointValueColumn="tableId" OptionsEndPointLabelColumn="tableId" />
        <Param Name="ArrayTransformType" Type="Property" Value="TransformComplexTwoDimensionalArray"  />
        <Param Name="ArrayTransRowValueFilter" Type="Property" Value="$.f[*].v" />
      </Params>

      <OutputColumns>
        <!-- Columns discovered dynamically from the table schema endpoint -->
        <Column Name="-Dynamic-" Expand="True"
                DataEndPoint="get_table_schema"
                DataEndPointParameters="ProjectId=[$ProjectId$];DatasetId=[$DatasetId$];TableId=[$TableId$];"
                ColumnInfoMap="Name=name;DataType=type"
                DataTypeMap="DT_I8=int,smallint,integer,bigint,tinyint,byteint;DT_WSTR=string,record;DT_DBTIMESTAMP=datetime;DT_DBDATE=date;DT_BOOL=bool,boolean;DT_R8=float,numeric,timestamp;DT_STR=bytes;DT_STR=bignumeric,bigdecimal" 
				/>
        <!-- Extra *_DT companion column for each TIMESTAMP field, converted from unix ms -->
        <Column Name="[$parent.name$]" Label="[$parent.name$]_DT" Expand="True"
                DataEndPoint="get_table_schema"
                DataEndPointParameters="ProjectId=[$ProjectId$];DatasetId=[$DatasetId$];TableId=[$TableId$];Filter=$.schema.fields[?(@type=='TIMESTAMP')]"
                ColumnInfoMap="Name=name;DataType=type"
				DataType="DT_DBTIMESTAMP" 
				Functions="FUN_UNIX_TIMESTAMP_MS_TO_DATE" 
				/>				
      </OutputColumns>

    </EndPoint>

    <!-- Dynamic endpoint: expanded once per table returned by list_tables, producing a
         "Records" entry per dataset.table. Desc fixed - it previously ended with the
         dangling text "for project ." (missing projectId token). -->
    <EndPoint  Name="get_[$parent.tableReference.datasetId$]_[$parent.tableReference.tableId$]" Label="[$parent.tableReference.datasetId$].[$parent.tableReference.tableId$]"
               Group="Records" Desc="Read data from [$parent.tableReference.datasetId$].[$parent.tableReference.tableId$] for project [$parent.tableReference.projectId$]."
               HelpLink="https://cloud.google.com/bigquery/docs/reference/rest/v2/tabledata/list"
               Url="projects/[$parent.tableReference.projectId$]/datasets/[$parent.tableReference.datasetId$]/tables/[$parent.tableReference.tableId$]/data"
               Method="GET" Filter="$.rows[*]" MetaDetectionOrder="Static"
               Template="PaginateRows"
               Expand="True"
               DataEndPoint="list_tables"
      >
      <Params>
        <Param Name="ArrayTransformType" Type="Property" Value="TransformComplexTwoDimensionalArray"  />
        <Param Name="ArrayTransRowValueFilter" Type="Property" Value="$.f[*].v" />
      </Params>

      <OutputColumns>
        <!-- Columns discovered dynamically from the table schema endpoint -->
        <Column Name="-Dynamic-"
                Expand="True"
                DataEndPoint="get_table_schema"
				CachedTtl="5"
                ColumnInfoMap="Name=name;DataType=type"
                DataEndPointParameters="DatasetId=[$parent.tableReference.datasetId$];TableId=[$parent.tableReference.tableId$]"
                DataTypeMap="DT_I8=int,smallint,integer,bigint,tinyint,byteint;DT_WSTR=string,record;DT_DBTIMESTAMP=datetime;DT_DBDATE=date;DT_BOOL=bool,boolean;DT_R8=float,numeric,timestamp;DT_STR=bytes;DT_STR=bignumeric,bigdecimal"
      />
        <!-- Extra *_DT companion column for each TIMESTAMP field, converted from unix ms -->
        <Column Name="[$parent.name$]" Label="[$parent.name$]_DT" 
                Expand="True"
                DataEndPoint="get_table_schema"
                DataEndPointParameters="DatasetId=[$parent.tableReference.datasetId$];TableId=[$parent.tableReference.tableId$];Filter=$.schema.fields[?(@type=='TIMESTAMP')]"
                ColumnInfoMap="Name=name;DataType=type"
				DataType="DT_DBTIMESTAMP" 
				Functions="FUN_UNIX_TIMESTAMP_MS_TO_DATE" 
				/>	  
      </OutputColumns>
    </EndPoint>


    <!-- Lists projects via the Cloud Resource Manager API. Note the absolute URL:
         this endpoint does not use the BigQuery base URL. -->
    <EndPoint Name="list_projects" Label="List Projects"  Group="Project" Desc="Lists Projects that the caller has permission on and satisfy the specified filter."
	HelpLink="https://cloud.google.com/resource-manager/reference/rest/v1/projects/list"
	Url="https://cloudresourcemanager.googleapis.com/v1/projects?filter=[$SearchFilter$]" Method="GET" Filter="$.projects[*]"
       Template="Paginate" CachedTtl="60">
      <Params>
        <Param Name="SearchFilter" Value=""
		Desc="An expression for filtering the results of the request. Filter rules are case insensitive. If multiple fields are included in a filter query, the query will return results that match any of the fields. Some eligible fields for filtering are: name, id, labels.{key} (where key is the name of a label), parent.type, parent.id, lifecycleState. Example: name:how* "
		/>

      </Params>
      <OutputColumns>
        <Column Name="projectId" DataType="DT_WSTR" Length="255" />
        <Column Name="name" DataType="DT_WSTR" Length="255"/>
        <Column Name="projectNumber" DataType="DT_WSTR" Length="255" />
        <Column Name="lifecycleState" DataType="DT_WSTR" Length="255"/>
        <Column Name="createTime" DataType="DT_DBTIMESTAMP"/>
      </OutputColumns>
    </EndPoint>

    <!-- Lists datasets via datasets.list. SearchFilter Desc corrected: the previous
         text was copy-pasted from the projects filter (parent.type, lifecycleState
         are project-only fields); datasets.list filters by label expressions. -->
    <EndPoint Name="list_datasets" Label="List Datasets" Group="Dataset" Desc="Lists all BigQuery datasets in the specified project to which the user has been granted the READER dataset role."
	HelpLink="https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/list"
	Url="projects/[$ProjectId$]/datasets?filter=[$SearchFilter$]" Method="GET" Filter="$.datasets[*]" Template="Paginate" CachedTtl="60">
      <Params>
        <Param Name="ProjectId" OptionsEndPoint="list_projects" OptionsEndPointValueColumn="projectId" OptionsEndPointLabelColumn="name" Value=""  />
        <Param Name="SearchFilter" Value="" Desc="An expression for filtering the results of the request by label. The syntax is labels.{key}[:{value}] (where key is the name of a label). Multiple filters can be ANDed together by connecting with a space. Example: labels.department:receiving labels.active" />

        <Param Name="all" Type="Query" Value="" Desc="Whether to list all datasets, including hidden ones" Options=";True;False"/>
      </Params>

      <OutputColumns>
        <Column Name="datasetReference.datasetId" Label="datasetId" DataType="DT_WSTR" Length="255" />
        <Column Name="datasetReference.projectId" Label="projectId" DataType="DT_WSTR" Length="255" />

        <Column Name="kind" DataType="DT_WSTR" Length="100"/>
        <Column Name="id" DataType="DT_WSTR" Length="100"/>
        <Column Name="location" DataType="DT_WSTR" Length="100"/>
      </OutputColumns>

    </EndPoint>

    <!-- Creates an empty dataset via datasets.insert; Name and Description are
         JSON-encoded (Functions="JSONENC") before being substituted into the Body. -->
    <EndPoint Name="post_dataset" Label="Create Dataset" Group="Dataset" Desc="Creates a new empty dataset."
 HelpLink="https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/insert"
 Url="projects/[$ProjectId$]/datasets" Method="POST" ContentType="application/json">

      <Body><![CDATA[{"description": "[$Description$]", "datasetReference": { "datasetId": "[$Name$]"} } ]]></Body>
      <Params>
        <Param Name="ProjectId" OptionsEndPoint="list_projects" OptionsEndPointValueColumn="projectId" OptionsEndPointLabelColumn="name" Value=""  />
        <Param Name="Name" Label="Dataset Name" Value="" Desc="Enter dataset name" Functions="JSONENC" Required="True" />
        <Param Name="Description" Value="This is dataset description" Functions="JSONENC" />
      </Params>

      <OutputColumns>
        <Column Name="datasetReference.datasetId" Label="datasetId" DataType="DT_WSTR" Length="255" />
        <Column Name="datasetReference.projectId" Label="projectId" DataType="DT_WSTR" Length="255" />

        <Column Name="kind" DataType="DT_WSTR" Length="100"/>
        <Column Name="id" DataType="DT_WSTR" Length="100"/>
        <Column Name="location" DataType="DT_WSTR" Length="100"/>
        <Column Name="friendlyName" DataType="DT_WSTR" Length="255"/>
        <Column Name="description" DataType="DT_WSTR" Length="1000"/>
        <Column Name="access" DataType="DT_WSTR" Length="1000"/>
      </OutputColumns>

    </EndPoint>

    <!-- Deletes a dataset via datasets.delete. Fixed attribute typo on the DatasetId
         param: OptionEndPointParameters renamed to OptionsEndPointParameters to match
         the attribute name used everywhere else in this file. -->
    <EndPoint Name="delete_dataset" Label="Delete Dataset" Group="Dataset" Desc="Deletes the dataset specified by the datasetId value. Before you can delete a dataset, you must delete all its tables, either manually or by specifying deleteContents. Immediately after deletion, you can create another dataset with the same name."
   HelpLink="https://cloud.google.com/bigquery/docs/reference/rest/v2/datasets/delete"
   Url="projects/[$ProjectId$]/datasets/[$DatasetId$]" Method="DELETE" >
      <Params>
        <Param Name="ProjectId" OptionsEndPoint="list_projects" OptionsEndPointValueColumn="projectId" OptionsEndPointLabelColumn="name" Value=""  />
        <Param Name="DatasetId" Label="&#009;DatasetId" Required="True" OptionsEndPoint="list_datasets" OptionsEndPointParameters="ProjectId=[$ProjectId$];" OptionsEndPointValueColumn="datasetId" OptionsEndPointLabelColumn="id" Value=""  />
        <Param Name="deleteContents" Label="&#009;&#009;Delete All Tables" Type="Query" Options=";True;False" Desc="If True, delete all the tables in the dataset. If False and the dataset contains tables, the request will fail. Default is False"/>
      </Params>
      <OutputColumns>
        <Column Name="Response" DataType="DT_WSTR" Length="255" />
      </OutputColumns>
    </EndPoint>

    <!-- Deletes a table via tables.delete. Desc corrected: the previous text was
         copy-pasted from delete_dataset and described deleting a dataset. Also fixed
         the OptionEndPointParameters typo (missing "s") on the DatasetId param. -->
    <EndPoint Name="delete_table" Label="Delete Table" Group="Table" Desc="Deletes the table specified by the tableId value. If the table contains data, all the data will be deleted."
   HelpLink="https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/delete"
   Url="projects/[$ProjectId$]/datasets/[$DatasetId$]/tables/[$TableId$]" Method="DELETE" >
      <Params>
        <Param Name="ProjectId" Label="&#009;&#009;ProjectId" Value="" OptionsEndPoint="list_projects" OptionsEndPointValueColumn="projectId" OptionsEndPointLabelColumn="name" />
        <Param Name="DatasetId" Label="&#009;DatasetId" OptionsEndPoint="list_datasets" OptionsEndPointParameters="ProjectId=[$ProjectId$];" OptionsEndPointValueColumn="datasetId" OptionsEndPointLabelColumn="id" Value=""  />
        <Param Name="TableId" Label="TableId" Required="True" OptionsEndPoint="list_tables" OptionsEndPointParameters="DatasetId=[$DatasetId$]" OptionsEndPointValueColumn="tableId" OptionsEndPointLabelColumn="tableId" />
      </Params>
      <OutputColumns>
        <Column Name="Response" DataType="DT_WSTR" Length="255" />
      </OutputColumns>
    </EndPoint>

    <!-- Lists tables in a dataset (paginated, results cached for 60s). The nested
         tableReference.* fields are flattened to plain tableId/datasetId/projectId labels. -->
    <EndPoint Name="list_tables" Label="List Tables" Group="Table" Desc="Lists BigQuery Tables for the specified project / dataset to which the user has been granted the READER dataset role."
	HelpLink="https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/list"
	Url="projects/[$ProjectId$]/datasets/[$DatasetId$]/tables" Method="GET" Filter="$.tables[*]"  Template="Paginate" CachedTtl="60">
      <Params>
        <Param Name="ProjectId" Label="&#009;&#009;ProjectId" Value="" OptionsEndPoint="list_projects" OptionsEndPointValueColumn="projectId" OptionsEndPointLabelColumn="name" />
        <!-- FIX: OptionEndPointParameters -> OptionsEndPointParameters (matches attribute name used elsewhere in this file) -->
        <Param Name="DatasetId" Label="&#009;DatasetId" Required="True" OptionsEndPoint="list_datasets" OptionsEndPointParameters="ProjectId=[$ProjectId$];" OptionsEndPointValueColumn="datasetId" OptionsEndPointLabelColumn="id" Value=""  />
      </Params>

      <OutputColumns>
        <Column Name="tableReference.tableId" Label="tableId" DataType="DT_WSTR" Length="255" />
        <Column Name="tableReference.datasetId" Label="datasetId" DataType="DT_WSTR" Length="255" />
        <Column Name="tableReference.projectId" Label="projectId" DataType="DT_WSTR" Length="255" />

        <Column Name="kind" DataType="DT_WSTR" Length="100"/>
        <Column Name="id" DataType="DT_WSTR" Length="100"/>
        <Column Name="type" DataType="DT_WSTR" Length="100"/>
        <!-- BigQuery reports creationTime as Unix epoch milliseconds; converted to a timestamp here -->
        <Column Name="creationTime" DataType="DT_DBTIMESTAMP" Functions="UNIX_TIMESTAMP_MS_TO_DATE"/>
      </OutputColumns>

    </EndPoint>

    <!-- Hidden helper endpoint: runs the SQL synchronously (maxResults:10) purely to
         obtain the result schema ($.schema.fields) for dynamic column discovery. -->
    <EndPoint  Name="get_query_schema" Group="Query" Label="Get Query Schema (From SQL)" Desc="Runs a BigQuery SQL query synchronously and returns query schema"
	HelpLink="https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query"
	Url="projects/[$ProjectId$]/queries" Method="POST" Filter="$.schema.fields[*]"
  ContentType="application/json" Hidden="True" CachedTtl="60" UseRawCache="True">
      <Body><![CDATA[{"query":"[$Sql$]","maxResults":10,"timeoutMs":[$timeoutMs$],"useLegacySql":[$useLegacySql$] , "location" : "[$Location$]" }]]></Body>
      <Params>
        <Param Name="Sql" Label="SQL Query" Editor="MultiLine" Required="True" Functions="JSONENC" Value="SELECT title,id,language,wp_namespace,reversion_id ,comment,num_characters FROM bigquery-public-data.samples.wikipedia LIMIT 1000" 
		DefaultValue="SELECT title,id,language,wp_namespace,reversion_id ,comment,num_characters FROM bigquery-public-data.samples.wikipedia LIMIT 1000"
		/>
		<Param Name="Filter" Required="True" Value="$.schema.fields[*]" Hidden="True"/>
        <Param Name="useLegacySql" Label="Use Legacy SQL Syntax?" Options="false;true" Value="false" />
        <!-- FIX: removed bogus Options="false;true" (copy-paste from useLegacySql) — this param is a numeric millisecond value -->
        <Param Name="timeoutMs" Value="90000" Label="timeout (Milliseconds)" Desc="How long to wait (in milliseconds) for the query to complete before the request times out." />
		<Param Name="Location" Template="LocationParam" />
      </Params>
      <OutputColumns>
        <Column Name="name" DataType="DT_WSTR" Length="255" />
        <Column Name="type" DataType="DT_WSTR" Length="255" />
      </OutputColumns>
    </EndPoint>

    <!-- Returns a table's column metadata ($.schema.fields) without reading table data;
         used by other endpoints for dynamic column discovery (raw-cached for 30s). -->
    <EndPoint  Name="get_table_schema" Label="Get Table Schema" Group="Table" 
	  Desc="Gets the specified table resource by table ID. This method does not return the data in the table, it only returns the table resource, which describes the structure of this table."
      HelpLink="https://cloud.google.com/bigquery/docs/reference/rest/v2/tables/get"
      Url="projects/[$ProjectId$]/datasets/[$DatasetId$]/tables/[$TableId$]" 
	  Method="GET" 
	  Filter="$.schema.fields[*]"
	  UseRawCache="True"
	  CachedTtl="30"
	  >
      <Params>
        <Param Name="DatasetId" Label="DatasetId" Required="True" OptionsEndPoint="list_datasets" OptionsEndPointValueColumn="datasetId" OptionsEndPointLabelColumn="datasetId" />
        <Param Name="TableId" Label="TableId"  Required="True" OptionsEndPoint="list_tables" OptionsEndPointParameters="DatasetId=[$DatasetId$]" OptionsEndPointValueColumn="tableId" OptionsEndPointLabelColumn="tableId" />
		<!-- FIX: JSONPath filter used @type (invalid) — current-node property access is @.type -->
		<Param Name="Filter" Required="True" Value="$.schema.fields[*]" Hidden="True" Options="All Columns=$.schema.fields[*];Timestamp Columns=$.schema.fields[?(@.type=='TIMESTAMP')]"/>
      </Params>
      <OutputColumns>
        <Column Name="name" DataType="DT_WSTR" Length="255" />
        <Column Name="type" DataType="DT_WSTR" Length="255" />
      </OutputColumns>
    </EndPoint>

    <!-- Streaming insert: pushes input rows to the tabledata.insertAll API as JSON,
         in compressed batches of up to 10000 rows / 10MB per request.
         Output rows are produced only from per-row insert errors ($.insertErrors);
         FailIf="RowsFoundAndNotHandled" fails the operation when any such error row
         comes back unhandled. -->
    <EndPoint Name="insert_table_data" Group="Table"
              Url="projects/[$ProjectId$]/datasets/[$DatasetId$]/tables/[$TableId$]/insertAll"
              EnableRequestCompression="True"
              Method="POST"
              BatchSize="10000"
              MaxRequestSize="10MB"              
              ContentType="application/json"
              Filter="$.insertErrors[*].errors[*]"
              FailIf="RowsFoundAndNotHandled"
              IncludeParentColumns="True"
              >
      <Params>
        <Param Name="ProjectId" Label="&#009;&#009;ProjectId" Value="" OptionsEndPoint="list_projects" OptionsEndPointValueColumn="projectId" OptionsEndPointLabelColumn="name" />
        <Param Name="DatasetId" Label="&#009;DatasetId" OptionsEndPoint="list_datasets" OptionsEndPointValueColumn="datasetId" OptionsEndPointLabelColumn="datasetId" />
        <Param Name="TableId" Label="TableId" OptionsEndPoint="list_tables" OptionsEndPointParameters="DatasetId=[$DatasetId$]" OptionsEndPointValueColumn="tableId" OptionsEndPointLabelColumn="tableId" />
      </Params>
      <!-- [$rows$] is the engine-rendered row array built per the LayoutMap below -->
      <Body><![CDATA[{"kind" : "bigquery#tableDataInsertAllRequest", "rows" : {$rows$}}]]></Body>
      <!-- LayoutMap: map every input column (src="*") straight into the JSON row payload -->
      <LayoutMap>
        <![CDATA[<?xml version="1.0" encoding="utf-8"?>
<settings>
  <dataset id="root" readfrominput="True" />
  <map name="json">
      <map src="*"  />
  </map>  
</settings>]]>
      </LayoutMap>
      <!-- Columns come from insertErrors[*].errors[*]; index identifies the failed input row -->
      <OutputColumns>
        <Column Name="P_insertErrors_index" Label="index" DataType="DT_I8" />
        <Column Name="reason" DataType="DT_WSTR" Length="255"/>
        <Column Name="location" DataType="DT_WSTR" Length="255"/>
        <Column Name="debugInfo" DataType="DT_WSTR" Length="4000"/>
        <Column Name="message" DataType="DT_WSTR" Length="4000"/>
      </OutputColumns>
    </EndPoint>

    <!-- Dynamic per-table streaming-insert endpoint: expanded once per table returned
         by list_tables (DataEndPoint), yielding post_<dataset>_<table> endpoints whose
         input columns are discovered from get_table_schema at design time. -->
    <EndPoint Name="post_[$parent.tableReference.datasetId$]_[$parent.tableReference.tableId$]" Group="Records"
              Url="projects/[$parent.tableReference.projectId$]/datasets/[$parent.tableReference.datasetId$]/tables/[$parent.tableReference.tableId$]/insertAll"
              Expand="True"
              DataEndPoint="list_tables"
              EnableRequestCompression="True"
              Method="POST"
              BatchSize="10000"
              MaxRequestSize="9MB"
              ContentType="application/json"
              Filter="$.insertErrors[*].errors[*]"
              FailIf="RowsFoundAndNotHandled"
              IncludeParentColumns="True"
              >
      <Params>
      </Params>
      <Body><![CDATA[{"kind" : "bigquery#tableDataInsertAllRequest", "rows" : {$rows$}}]]></Body>
      <LayoutMap>
        <![CDATA[<?xml version="1.0" encoding="utf-8"?>
<settings>
  <dataset id="root" readfrominput="True" />
  <map name="json">
      <map src="*"  />
  </map>  
</settings>]]>
      </LayoutMap>
      <OutputColumns>
        <Column Name="P_insertErrors_index" Label="index" DataType="DT_I8" />
        <Column Name="reason" DataType="DT_WSTR" Length="255"/>
        <Column Name="location" DataType="DT_WSTR" Length="255"/>
        <Column Name="debugInfo" DataType="DT_WSTR" Length="4000"/>
        <Column Name="message" DataType="DT_WSTR" Length="4000"/>
      </OutputColumns>
      <InputColumns>
        <!-- FIX: DataTypeMap had a duplicate DT_STR= key (bytes vs bignumeric,bigdecimal);
             merged into a single DT_STR entry so no mapping is silently dropped -->
        <Column Name="-Dynamic-"
                Expand="True"
                DataEndPoint="get_table_schema"
                DataEndPointParameters="DatasetId=[$parent.tableReference.datasetId$];TableId=[$parent.tableReference.tableId$];Filter=$.schema.fields[*];"
                ColumnInfoMap="Name=name;DataType=type" 
				DataTypeMap="DT_I8=int,smallint,integer,bigint,tinyint,byteint;DT_WSTR=string,record;DT_DBTIMESTAMP=datetime;DT_DBDATE=date;DT_BOOL=bool,boolean;DT_R8=float,numeric,timestamp;DT_STR=bytes,bignumeric,bigdecimal"
         />        		 
        <!-- FIX: JSONPath filter used @type (invalid) — current-node property access is @.type,
             matching the same TIMESTAMP filter offered by get_table_schema -->
        <Column Name="[$parent.name$]" Label="[$parent.name$]_DT" Expand="True" Desc="Alias for any TIMESTAMP fields, allows alternate way to map DateTime date to UNIX timestamp. BigQuery allows to use Timestamp or ISO date as input for INSERT/UPDATE operation on TIMESTAMP field."
				DataEndPoint="get_table_schema"
				DataEndPointParameters="DatasetId=[$parent.tableReference.datasetId$];TableId=[$parent.tableReference.tableId$];Filter=$.schema.fields[?(@.type=='TIMESTAMP')]"
                ColumnInfoMap="Name=name;DataType=type" 
				DataType="DT_DBTIMESTAMP" 
				/>		 
      </InputColumns>
    </EndPoint>


  </EndPoints>

  <Tables>
    <!-- Dynamic table catalog: expanded once per table returned by list_tables
         (DataEndPoint). Each virtual table is named after the BigQuery tableId and
         wires SELECT / INSERT / lookup to the correspondingly expanded endpoints. -->
    <Table Name="[$parent.tableReference.tableId$]" Label="[$parent.tableReference.datasetId$].[$parent.tableReference.tableId$]"
           SelectEndPoint="get_[$parent.tableReference.datasetId$]_[$parent.tableReference.tableId$]"
           InsertEndPoint="post_[$parent.tableReference.datasetId$]_[$parent.tableReference.tableId$]"
           LookupEndPoint="get_query"
           Expand="True"
           DataEndPoint="list_tables">
    </Table>
  </Tables>
  <Examples>

    <Example Group="ODBC" Slug="query-using-simple-sql-native-sql" Label="Query using simple SQL (Native SQL)" Default="True">
      <Desc><![CDATA[
<p><strong>Native SQL</strong> — the SQL runs on the BigQuery side (server-side). Prefix the SQL with <code>#DirectSQL</code> to use this mode. Example queries a public dataset table (<code>bigquery-public-data.samples.wikipedia</code>); you can use your own or other free datasets (e.g. <code>nyc-tlc.yellow.trips</code>). Table name has three parts: <code>[Project.]Dataset.Table</code>.</p>
      ]]></Desc>
      <Code>
        <![CDATA[#DirectSQL
SELECT *
FROM bigquery-public-data.samples.wikipedia
LIMIT 1000
]]>
      </Code>
    </Example>

    <Example Group="ODBC" Slug="query-using-complex-sql-native-sql" Label="Query using complex SQL (Native SQL)">
      <Desc><![CDATA[
<p><strong>Native SQL</strong> — the SQL runs on the BigQuery side (server-side). Prefix with <code>#DirectSQL</code> to use this mode. Example uses <code>GROUP BY</code> and <code>ORDER BY</code> with the public dataset <code>bigquery-public-data.usa_names.usa_1910_2013</code>.</p>
      ]]></Desc>
      <Code>
        <![CDATA[#DirectSQL 
SELECT name, gender, SUM(number) AS total
FROM bigquery-public-data.usa_names.usa_1910_2013
GROUP BY name, gender
ORDER BY total DESC
LIMIT 10 
]]>
      </Code>
    </Example>

    <Example Group="ODBC" Slug="delete-multiple-records-dml-native-sql" Label="Delete multiple records (DML) (Native SQL)">
      <Desc><![CDATA[
<p><strong>Native SQL</strong> — the SQL runs on the BigQuery side (server-side). Prefix with <code>#DirectSQL</code> to use this mode. Runs a <code>DELETE</code> statement; use your own dataset and table in place of <code>TestDataset.MyTable</code>.</p>
      ]]></Desc>
      <Code>
        <![CDATA[#DirectSQL
DELETE FROM TestDataset.MyTable
WHERE Id > 5 
]]>
      </Code>
    </Example>

    <Example Group="ODBC" Slug="query-with-cast-unix-timestamp-as-datetime-native-sql" Label="Query with CAST unix TIMESTAMP as datetime (Native SQL)"
             HelpLink="https://cloud.google.com/bigquery/docs/reference/standard-sql/conversion_functions#cast">
      <Desc><![CDATA[
<p><strong>Native SQL</strong> — the SQL runs on the BigQuery side (server-side). Prefix with <code>#DirectSQL</code> to use this mode. Shows how to return a timestamp column as date or datetime using <code>CAST(col_timestamp AS DATE)</code> or <code>CAST(col_timestamp AS DATETIME)</code>.</p>
      ]]></Desc>
      <Code>
        <![CDATA[#DirectSQL 
SELECT id, col_timestamp, CAST(col_timestamp AS DATE) AS timestamp_as_date, CAST(col_timestamp AS DATETIME) AS timestamp_as_datetime
FROM MyProject.MyDataset.MyTable
LIMIT 10 
]]>
      </Code>
    </Example>

    <Example Group="ODBC" Slug="create-table-run-ddl-native-sql" Label="Create table / run DDL (Native SQL)"
             HelpLink="https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_table_statement">
      <Desc><![CDATA[
<p><strong>Native SQL</strong> — the SQL runs on the BigQuery side (server-side). Prefix with <code>#DirectSQL</code> to use this mode. Runs a DDL statement such as <code>CREATE TABLE</code>.</p>
      ]]></Desc>
      <Code>
        <![CDATA[#DirectSQL
CREATE TABLE TestDataset.Table1 (ID INT64, Name STRING, BirthDate DATETIME, Active BOOL)
]]>
      </Code>
    </Example>

    <Example Group="ODBC" Slug="update-table-data-for-complex-types-record-geography-json-native-sql" Label="UPDATE table data for complex types (RECORD, Geography, JSON) (Native SQL)"
             HelpLink="https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#update_statement">
      <Desc><![CDATA[
<p><strong>Native SQL</strong> — the SQL runs on the BigQuery side (server-side). Prefix with <code>#DirectSQL</code> to use this mode. Runs an <code>UPDATE</code> (or other DML) with complex types such as RECORD (array), Geography, and JSON.</p>
      ]]></Desc>
      <Code>
        <![CDATA[#DirectSQL 
UPDATE TestDataset.DataTypeTest 
SET ColTime='23:59:59.123456',
 ColGeography=ST_GEOGPOINT(34.150480, -84.233870),
 ColRecord=(1,"AA","Column3 data"),
 ColBigNumeric=1222222222222222222.123456789123456789123456789123456789,
 ColJson= JSON_ARRAY('{"doc":1, "values":[{"id":1},{"id":2}]}') 
WHERE ColInteger=1
]]>
      </Code>
    </Example>

	<Example Group="ODBC" Slug="insert-datetime-unix-timestamp-values-native-sql" Label="INSERT datetime / Unix timestamp values (Native SQL)">
      <Desc><![CDATA[
<p><strong>Native SQL</strong> — the SQL runs on the BigQuery side (server-side). Prefix with <code>#DirectSQL</code> to use this mode. Shows how to insert datetime or Unix timestamp values into a TIMESTAMP column (datetime strings or Unix epoch, with or without milliseconds). Use the <code>_DT</code> alias on the column if your ETL tool expects a datetime type.</p>
      ]]></Desc>
	<Code>
        <![CDATA[#DirectSQL 
--DateTime Without milliseconds		
INSERT INTO TestDataset.DataTypeTest (ColInteger,ColString,ColTimestamp) VALUES(1004,'My date time','2025-01-31T23:59:59');

--DateTime with milliseconds
INSERT INTO TestDataset.DataTypeTest (ColInteger,ColString,ColTimestamp) VALUES(1004,'My date time','2025-01-31T23:59:59.1234');

--DateTime alias field (e.g. {your-timestamp-column}_DT) - Useful if ETL tool needs DateTime datatype rather than Numeric Unix Epoch
INSERT INTO TestDataset.DataTypeTest (ColInteger,ColString,ColTimestamp_DT) VALUES(1004,'My date time','2025-01-31T23:59:59');

--Unix Epoch format (without milliseconds)
INSERT INTO TestDataset.DataTypeTest (ColInteger,ColString,ColTimestamp) VALUES(1006,'My date time',1738367999);

--Unix Epoch format (with milliseconds)
INSERT INTO TestDataset.DataTypeTest (ColInteger,ColString,ColTimestamp) VALUES(1006,'My date time',1738367999.12345);
]]>
      </Code>
    </Example>

    <Example Group="ODBC" Slug="drop-table-if-exists-other-ddl-native-sql" Label="DROP table (if exists) / other DDL (Native SQL)"
             HelpLink="https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_table_statement">
      <Desc><![CDATA[
<p><strong>Native SQL</strong> — the SQL runs on the BigQuery side (server-side). Prefix with <code>#DirectSQL</code> to use this mode. Runs a DDL statement such as <code>DROP TABLE IF EXISTS</code>.</p>
      ]]></Desc>
      <Code>
        <![CDATA[#DirectSQL
DROP TABLE IF EXISTS Myproject.Mydataset.Mytable
]]>
      </Code>
    </Example>

    <Example Group="ODBC" Slug="call-stored-procedure-native-sql" Label="Call stored procedure (Native SQL)"
             HelpLink="https://cloud.google.com/bigquery/docs/procedures">
      <Desc><![CDATA[
<p><strong>Native SQL</strong> — the SQL runs on the BigQuery side (server-side). Prefix with <code>#DirectSQL</code> to use this mode. Runs a BigQuery stored procedure with parameters; example assumes a procedure <code>usp_GetData</code> in <code>TestDataset</code>.</p>
      ]]></Desc>
      <Code>
        <![CDATA[#DirectSQL
CALL TestDataset.usp_GetData(1)
]]>
      </Code>
    </Example>

    <Example Group="ODBC" Slug="insert-single-row" Label="INSERT single row">
      <Desc><![CDATA[
<p>Inserts a single row using ZappySys query syntax. You can optionally use <code>WITH(DatasetId='...', ProjectId='...', Output='*')</code>.</p>
      ]]></Desc>
      <Code>
        <![CDATA[INSERT INTO MyBQTable1(SomeBQCol1, SomeBQCol2) VALUES(1,'AAA')
--WITH(DatasetId='TestDataset',Output='*')
--WITH(DatasetId='TestDataset',ProjectId='MyProjectId',Output='*')]]>
      </Code>
    </Example>

    <Example Group="ODBC" Slug="insert-multiple-rows-from-sql-server" Label="INSERT multiple rows from SQL Server"
             HelpLink="https://zappysys.com/links/?id=10114">
      <Desc><![CDATA[
<p>Bulk insert into a BigQuery table from Microsoft SQL Server. The column list is taken from the source query; use SQL aliases so column names match the target table.</p>
      ]]></Desc>
      <Code>
        <![CDATA[INSERT INTO MyBQTable1
SOURCE(
    'MSSQL'
  , 'Data Source=localhost;Initial Catalog=tempdb;Integrated Security=true'
  , 'SELECT Col1 AS SomeBQCol1, Col2 AS SomeBQCol2 FROM SomeTable WHERE SomeCol=123'
)
--WITH(DatasetId='TestDataset',Output='*')
--WITH(DatasetId='TestDataset',ProjectId='MyProjectId',Output='*')]]>
      </Code>
    </Example>

    <Example Group="ODBC" Slug="insert-multiple-rows-from-any-odbc-source-dsn" Label="INSERT multiple rows from any ODBC source (DSN)"
             HelpLink="https://zappysys.com/links/?id=10114">
      <Desc><![CDATA[
<p>Bulk insert into a BigQuery table from any ODBC source (DSN). The column list comes from the source query; use aliases so names match the target table.</p>
      ]]></Desc>
      <Code>
        <![CDATA[INSERT INTO MyBQTable1
SOURCE(
    'ODBC'
  , 'DSN=MyDsn'
  , 'SELECT Col1 AS SomeBQCol1, Col2 AS SomeBQCol2 FROM SomeTable WHERE SomeCol=123'
)
WITH(DatasetId='TestDataset')]]>
      </Code>
    </Example>

    <Example Group="ODBC" Slug="insert-multiple-rows-from-json-files-api" Label="INSERT multiple rows from JSON files / API"
             HelpLink="https://zappysys.com/links/?id=10114">
      <Desc><![CDATA[
<p>Bulk insert into a BigQuery table from JSON (URL or file). Column list comes from the source query; use aliases to match the target table. In the code, an ODBC connection to a JSON source is used (e.g. ZappySys ODBC JSON driver); you can use a similar approach with CSV or XML sources.</p>
      ]]></Desc>
      <Code>
        <![CDATA[INSERT INTO MyBQTable1
SOURCE(
    'ODBC'
  , 'Driver={ZappySys JSON Driver};Src=''https://some-url/get-data'''
  , 'SELECT Col1 AS SomeBQCol1, Col2 AS SomeBQCol2 FROM _root_'
)
--WITH(DatasetId='TestDataset',Output='*')
--WITH(DatasetId='TestDataset',ProjectId='MyProjectId',Output='*')]]>
      </Code>
    </Example>

    <Example Group="ODBC" Slug="list-projects" Label="List projects">
      <Desc><![CDATA[
<p>Returns projects the user has access to.</p>
      ]]></Desc>
      <Code><![CDATA[SELECT * FROM list_projects]]></Code>
    </Example>

    <Example Group="ODBC" Slug="list-datasets" Label="List datasets">
      <Desc><![CDATA[
<p>Returns datasets for the given project. If <code>ProjectId</code> is omitted, connection-level settings are used.</p>
      ]]></Desc>
      <Code>
        <![CDATA[SELECT * FROM list_datasets
--WITH(ProjectId='MyProjectId')]]>
      </Code>
    </Example>

    <Example Group="ODBC" Slug="list-tables" Label="List tables">
      <Desc><![CDATA[
<p>Returns tables for the given project/dataset. If <code>ProjectId</code> or <code>DatasetId</code> is omitted, connection-level settings are used.</p>
      ]]></Desc>
      <Code>
        <![CDATA[SELECT * FROM list_tables
--WITH(ProjectId='MyProjectId')
--WITH(ProjectId='MyProjectId',DatasetId='MyDatasetId')]]>
      </Code>
    </Example>

    <Example Group="ODBC" Slug="delete-dataset" Label="Delete dataset">
      <Desc><![CDATA[
<p>Deletes a dataset by ID. Use <code>deleteContents='true'</code> to delete all tables in the dataset.</p>
      ]]></Desc>
      <Code><![CDATA[SELECT * FROM delete_dataset WITH(DatasetId='MyDatasetId', deleteContents='False')]]></Code>
    </Example>


  </Examples>
</ApiConfig>