I'm trying to insert a CSV file into BigQuery using Python, but I think I've missed something, since each load replaces the existing data instead of appending to it:
from google.cloud import bigquery
from google.oauth2 import service_account
import os

os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = r"C:/Users/Pamungkas/Documents/Dump_Data/testing-353407-a3c774efeb5a.json"

client = bigquery.Client()
table_id = "testing-353407.testing_coba.Sales_Menu_COGS_Detail_Report"
file_path = r"C:\Users\Pamungkas\Downloads\Sales_Menu_COGS_Detail_Report_Jan.csv"

job_config = bigquery.LoadJobConfig(
    source_format=bigquery.SourceFormat.CSV,
    skip_leading_rows=1,
    autodetect=True,
    write_disposition=bigquery.WriteDisposition.WRITE_TRUNCATE,
)

with open(file_path, "rb") as source_file:
    job = client.load_table_from_file(source_file, table_id, job_config=job_config)

job.result()  # Waits for the job to complete.

table = client.get_table(table_id)  # Make an API request.
print(
    "Loaded {} rows and {} columns to {}".format(
        table.num_rows, len(table.schema), table_id
    )
)
I guess the problem is in job_config, but I still can't see it. Can anyone help me with this?
The problem is the write_disposition: WRITE_TRUNCATE overwrites the table's contents on every load. Change it to bigquery.WriteDisposition.WRITE_APPEND so each load adds rows to the existing table instead of replacing it.
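A minimal sketch of the corrected job_config, keeping your other settings unchanged:

from google.cloud import bigquery

job_config = bigquery.LoadJobConfig(
    source_format=bigquery.SourceFormat.CSV,
    skip_leading_rows=1,
    autodetect=True,
    # WRITE_APPEND adds the loaded rows to the existing table
    # instead of replacing its contents.
    write_disposition=bigquery.WriteDisposition.WRITE_APPEND,
)

For reference, WriteDisposition has three options: WRITE_TRUNCATE (replace the table's data), WRITE_APPEND (add to it), and WRITE_EMPTY (fail unless the table is empty). One caveat: with autodetect=True and an existing table, the schema inferred from the CSV must be compatible with the table's schema for the append to succeed.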