I have a Python workflow using Prefect. It runs fine on my local machine, but when I build a Docker image for it and run it in a container, I get this error:
Traceback (most recent call last):
File "/usr/local/lib/python3.8/site-packages/prefect/engine.py", line 1221, in orchestrate_task_run
result = await run_sync(task.fn, *args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/prefect/utilities/asyncutils.py", line 57, in run_sync_in_worker_thread
return await anyio.to_thread.run_sync(call, cancellable=True)
File "/usr/local/lib/python3.8/site-packages/anyio/to_thread.py", line 31, in run_sync
return await get_asynclib().run_sync_in_worker_thread(
File "/usr/local/lib/python3.8/site-packages/anyio/_backends/_asyncio.py", line 937, in run_sync_in_worker_thread
return await future
File "/usr/local/lib/python3.8/site-packages/anyio/_backends/_asyncio.py", line 867, in run
result = context.run(func, *args)
File "main.py", line 65, in getData
df2=wd.write(table,df)
File "/tmp/tmpypkmh1xjprefect/utils/write_data.py", line 48, in write
if engine.has_table(TableName):
File "<string>", line 2, in has_table
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/util/deprecations.py", line 402, in warned
return fn(*args, **kwargs)
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 3354, in has_table
with self._optional_conn_ctx_manager(None) as conn:
File "/usr/local/lib/python3.8/contextlib.py", line 113, in __enter__
return next(self.gen)
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 3083, in _optional_conn_ctx_manager
with self.connect() as conn:
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 3315, in connect
return self._connection_cls(self, close_with_result=close_with_result)
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 96, in __init__
else engine.raw_connection()
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 3394, in raw_connection
return self._wrap_pool_connect(self.pool.connect, _connection)
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 3364, in _wrap_pool_connect
Connection._handle_dbapi_exception_noconnection(
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 2198, in _handle_dbapi_exception_noconnection
util.raise_(
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/util/compat.py", line 208, in raise_
raise exception
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/engine/base.py", line 3361, in _wrap_pool_connect
return fn()
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/pool/base.py", line 310, in connect
return _ConnectionFairy._checkout(self)
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/pool/base.py", line 868, in _checkout
fairy = _ConnectionRecord.checkout(pool)
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/pool/base.py", line 476, in checkout
rec = pool._do_get()
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/pool/impl.py", line 146, in _do_get
self._dec_overflow()
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/util/langhelpers.py", line 70, in __exit__
compat.raise_(
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/util/compat.py", line 208, in raise_
raise exception
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/pool/impl.py", line 143, in _do_get
return self._create_connection()
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/pool/base.py", line 256, in _create_connection
return _ConnectionRecord(self)
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/pool/base.py", line 371, in __init__
self.__connect()
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/pool/base.py", line 666, in __connect
pool.logger.debug("Error on connect(): %s", e)
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/util/langhelpers.py", line 70, in __exit__
compat.raise_(
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/util/compat.py", line 208, in raise_
raise exception
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/pool/base.py", line 661, in __connect
self.dbapi_connection = connection = pool._invoke_creator(self)
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/engine/create.py", line 578, in connect
return dialect.connect(*cargs, **cparams)
File "/usr/local/lib/python3.8/site-packages/sqlalchemy/engine/default.py", line 597, in connect
return self.dbapi.connect(*cargs, **cparams)
sqlalchemy.exc.DBAPIError: (pyodbc.Error) ('01000', "[01000] [unixODBC][Driver Manager]Can't open lib 'ODBC Driver 17 for SQL Server' : file not found (0) (SQLDriverConnect)")
(Background on this error at: https://sqlalche.me/e/14/dbapi)
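The last line is the real error: inside the container, unixODBC cannot find the "ODBC Driver 17 for SQL Server" shared library, i.e. the driver is not installed in the image. A quick way to check which ODBC drivers a Python environment can actually see (run it both locally and inside the container) is this small snippet:

import pyodbc

# unixODBC reports the drivers registered in odbcinst.ini;
# "ODBC Driver 17 for SQL Server" should appear here once it is installed
print(pyodbc.drivers())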
This is my write.py file, which I use to connect to MS SQL Server:
import pandas as pd
import numpy as np
import pyodbc  # the DBAPI driver behind SQLAlchemy's mssql+pyodbc dialect
from sqlalchemy import create_engine
from sqlalchemy.engine import URL


class WriteToDatabase:
    def __init__(self) -> None:
        pass

    def createEngine(self):
        connection_url = URL.create(
            "mssql+pyodbc",
            username="user",
            password="3^hGcjs!*M4AEY^s",
            host="10.6.6.23",
            port=1433,
            database="JIRA_Issue_Database",
            query={
                "driver": "ODBC Driver 17 for SQL Server",
            },
        )
        engine = create_engine(connection_url)
        return engine
    def removeDuplicate(self, df1, df2):
        df = pd.concat([df1, df2]).drop_duplicates("Issue id", keep="first")
        return df

    # def removeComment(self, df):
    #     col_comment = [col for col in df.columns if "Comment" in col or "Attachment" in col]
    #     dfCleanColumn = df.drop(col_comment, axis=1)
    #     return dfCleanColumn

    def addMoreId(self, ids):
        # pad a single id with a dummy value so that tuple(ids) renders as
        # ('x', 'Dummy Id') instead of the invalid one-element SQL ('x',)
        if len(ids) == 1:
            return np.array([ids[0], "Dummy Id"])
        return ids
    def write(self, TableName, df):
        engine = self.createEngine()
        # drop duplicate columns
        df2 = df.loc[:, ~df.T.duplicated(keep="first")]
        # Engine.has_table() is deprecated in SQLAlchemy 1.4; inspect(engine).has_table() is the replacement
        if engine.has_table(TableName):
            print("getting data from database...")
            with engine.connect() as connection:
                q = '''SELECT [Issue key]
                       FROM [{tableName}]
                       WHERE (Created >= DATEADD(day, -7, GETDATE()))
                          OR (Updated >= DATEADD(day, -7, GETDATE()))
                          OR (Resolved >= DATEADD(day, -7, GETDATE()))'''.format(tableName=TableName)
                # remove rows that would be duplicated when the new data is inserted
                result = connection.execute(q)
                new_key = df2["Issue key"].values
                result = np.array([i[0] for i in result.all()])
                all_key = np.concatenate((result, new_key))
                all_key = np.unique(all_key, return_index=False)
                query = "DELETE FROM [{tableName}] WHERE [Issue key] IN {ids}".format(
                    ids=tuple(self.addMoreId(all_key)), tableName=TableName
                )
                connection.execute(query)
                print("table already exists: inserting data.....")
                # avoid an error when the downloaded data contains a new column
                q_col = "SELECT NAME FROM sys.columns WHERE object_id = OBJECT_ID('dbo.[{tableName}]')".format(tableName=TableName)
                cols = connection.execute(q_col)
                existing_cols = [col[0] for col in cols]
                df2 = df2[df2.columns.intersection(existing_cols)]
                print(df2.shape, len(existing_cols))
                df2.to_sql(TableName, engine, index=False, if_exists="append")
        else:
            print("table does not exist: inserting.....")
            df2.to_sql(TableName, engine, index=False, if_exists="replace")
        return df2
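A side note on write(): the SELECT and DELETE statements are assembled with Python string formatting, and addMoreId only exists to stop tuple() from emitting invalid one-element SQL like ('x',). A hedged alternative sketch (assuming SQLAlchemy 1.4, which the traceback shows) passes the IN list as a bound "expanding" parameter instead, which removes the dummy-id workaround; the table name still has to be interpolated, since identifiers cannot be bound parameters, so it must come from trusted code:

from sqlalchemy import bindparam, text

# drop-in replacement for the DELETE inside write(): the key list travels
# as data, and expanding=True renders one placeholder per list element
stmt = text(
    "DELETE FROM [{tableName}] WHERE [Issue key] IN :ids".format(tableName=TableName)
).bindparams(bindparam("ids", expanding=True))
connection.execute(stmt, {"ids": list(all_key)})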
This is my Dockerfile:
FROM python:3.8-slim-buster
WORKDIR /prefect-docker
COPY requirements.txt requirements.txt
RUN pip3 install -r requirements.txt
COPY . .
- My computer OS: Windows 10
Do I need to set up something in the Docker container to allow it to connect to SQL Server?
CodePudding user response:
I have fixed this. The solution: I needed to install ODBC Driver 17 for SQL Server (for Debian 10) in my Docker container. Thanks to alwayslearning for giving me the hint in the comment section.
This is my latest Dockerfile:
FROM python:3.8-slim-buster
RUN apt-get update
RUN apt-get update && apt-get install -y gnupg2
RUN apt-get install -y curl apt-transport-https
RUN curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add -
RUN curl https://packages.microsoft.com/config/debian/11/prod.list > /etc/apt/sources.list.d/mssql-release.list
RUN apt-get update
RUN ACCEPT_EULA=Y apt-get install -y msodbcsql17 unixodbc-dev
WORKDIR /prefect-docker
COPY requirements.txt requirements.txt
RUN pip3 install -r requirements.txt
COPY . .
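Two hedged notes on this Dockerfile: the base image python:3.8-slim-buster is Debian 10, while the prod.list fetched above is the Debian 11 one (it evidently still resolves msodbcsql17, but https://packages.microsoft.com/config/debian/10/prod.list is the matching list for buster). The repeated RUN lines can also be collapsed into a single layer so the apt cache does not bloat the image. A minimal consolidated sketch, assuming the Debian 10 package list works the same way:

FROM python:3.8-slim-buster
# install the Microsoft ODBC driver in one layer and clean the apt cache
RUN apt-get update \
    && apt-get install -y --no-install-recommends curl gnupg2 apt-transport-https \
    && curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add - \
    && curl https://packages.microsoft.com/config/debian/10/prod.list > /etc/apt/sources.list.d/mssql-release.list \
    && apt-get update \
    && ACCEPT_EULA=Y apt-get install -y msodbcsql17 unixodbc-dev \
    && rm -rf /var/lib/apt/lists/*
WORKDIR /prefect-docker
COPY requirements.txt requirements.txt
RUN pip3 install -r requirements.txt
COPY . .

After building, pyodbc.drivers() from the snippet earlier should list "ODBC Driver 17 for SQL Server" inside the container.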