#Import Module, connect to google drive
try:
import pymysql
except:
!pip install pymysql
import pymysql
import re
import pandas as pd
import threading
from queue import Queue
from google.colab import drive
drive.mount('/content/drive', force_remount=False)
path = "/content/drive/MyDrive/Colab_Notebooks/data_base/MySQL/"
#function of create table and insert data to mysql
def create_table_to_mysql(table, columns):
db = pymysql.connect(host='ithome.uk.to',user='kerek',password='',database='colab',charset="utf8")
cursor = db.cursor()
sql_statement = f"CREATE TABLE {table}("
for column in columns:
sql_statement+=f"""`{column}` text,"""
sql_statement = re.sub("[,]$", "", sql_statement)
sql_statement+=");"
cursor.execute(sql_statement)
db.commit()
def insert_data_to_mysql(que):
db = pymysql.connect(host='ithome.uk.to',user='kerek',password='',database='colab',charset="utf8")
cursor = db.cursor()
while not que.empty():
row_data = que.get()
sql_statement = f"INSERT INTO {table} Values("
for value in rowData:
sql_statement+=f"""%s,"""
sql_statement = re.sub("[,]$", "", sql_statement)
sql_statement+=")"
cursor.execute(sql_statement, row_data)
db.commit()
db.close()
# Read the Excel sheet: column names become MySQL columns, rows become inserts.
# dtype=str keeps everything textual (all columns are created as `text`).
df = pd.read_excel(f"{path}Mp3_DataBase.xlsx", dtype=str)
df.fillna("None", inplace=True)
table = "Mp3_DataBase多線程"
allData = df.values.tolist()
columns = df.columns.tolist()

# Create the table; ignore failure (e.g. table already exists) but report it
# instead of the original silent bare `except: pass`.
try:
    create_table_to_mysql(table, columns)
except Exception as exc:
    print(f"create_table_to_mysql skipped: {exc}")

# Fill a thread-safe queue with every row, then drain it with 10 worker
# threads to test multithreaded inserts.
que = Queue()
for rowData in allData:
    que.put(rowData)

threads = [threading.Thread(target=insert_data_to_mysql, args=(que,))
           for _ in range(10)]
for t in threads:
    t.start()
for t in threads:
    t.join()
print("finished")