Skip to content

Commit 296c5a3

Browse files
committed
Fixed the issue of failing tests in GitHub Actions
1 parent 68fa020 commit 296c5a3

File tree

4 files changed

+50
-22
lines changed

4 files changed

+50
-22
lines changed

.github/workflows/integration.yml

Lines changed: 45 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -9,14 +9,14 @@ on:
99
jobs:
1010
build_and_test:
1111
runs-on: ubuntu-latest
12-
12+
environment: azure-prod
1313
env:
14-
DATABRICKS_SERVER_HOSTNAME: ${{ secrets.DATABRICKS_HOST }}
15-
DATABRICKS_HTTP_PATH: ${{ secrets.TEST_PECO_WAREHOUSE_HTTP_PATH }}
14+
DATABRICKS_SERVER_HOSTNAME: ${{ secrets.DATABRICKS_SERVER_HOSTNAME }}
15+
DATABRICKS_HTTP_PATH: ${{ secrets.DATABRICKS_HTTP_PATH }}
1616
DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
1717
DATABRICKS_CATALOG: ${{ secrets.DATABRICKS_CATALOG }}
1818
DATABRICKS_SCHEMA : ${{ secrets.DATABRICKS_SCHEMA }}
19-
DATABRICKS_USER: ${{ secrets.TEST_PECO_SP_ID }}
19+
DATABRICKS_USER: ${{ secrets.DATABRICKS_USER }}
2020

2121
steps:
2222
# Checkout your own repository
@@ -28,7 +28,7 @@ jobs:
2828
uses: actions/checkout@v3
2929
with:
3030
repository: jprakash-db/databricks-sql-python
31-
path: databricks_sql_connector_core
31+
path: databricks_sql_python
3232
ref : jprakash-db/PECO-1803
3333

3434
# Set up Python
@@ -37,29 +37,58 @@ jobs:
3737
with:
3838
python-version: '3.9'
3939

40+
# #----------------------------------------------
41+
# # ----- install & configure poetry -----
42+
# #----------------------------------------------
43+
# - name: Install Poetry
44+
# uses: snok/install-poetry@v1
45+
# with:
46+
# virtualenvs-create: true
47+
# virtualenvs-in-project: true
48+
# installer-parallel: true
49+
4050
# Install Poetry
4151
- name: Install Poetry
4252
run: |
4353
python -m pip install --upgrade pip
4454
pip3 install poetry
55+
python3 -m venv venv
56+
ls databricks_sql_python/databricks_sql_connector_core
4557
46-
# Build the .whl file in the dependency repository
47-
- name: Build Dependency Package
58+
# Install the requirements of your repository
59+
- name: Install Dependencies
4860
run: |
49-
poetry build -C databricks_sql_connector_core
61+
source venv/bin/activate
62+
poetry build
63+
pip3 install dist/*.whl
5064
51-
# Install the .whl file using pip
52-
- name: Install Dependency Package
65+
# Build the .whl file in the dependency repository
66+
- name: Build Dependency Package
5367
run: |
54-
pip3 install databricks_sql_connector_core/dist/*.whl
68+
source venv/bin/activate
69+
pip3 install databricks_sql_python/databricks_sql_connector_core/dist/*.whl
5570
56-
# Install the requirements of your repository
57-
- name: Install Dependencies
58-
run: |
59-
poetry install
71+
# # Install the .whl file using pip
72+
# - name: Install Dependency Package
73+
# run: |
74+
# pip3 install databricks_sql_python/databricks_sql_connector_core/dist/*.whl
6075

6176
# Run pytest to execute tests in your repository
6277
- name: Run Tests
6378
run: |
64-
python -m pytest src/databricks_sqlalchemy --dburi "databricks://token:$DATABRICKS_TOKEN@$DATABRICKS_SERVER_HOSTNAME?http_path=$DATABRICKS_HTTP_PATH&catalog=$DATABRICKS_CATALOG&schema=$DATABRICKS_SCHEMA"
79+
source venv/bin/activate
80+
pip3 list
81+
pip3 install pytest
82+
83+
- name : Main Tests
84+
run: |
85+
source venv/bin/activate
86+
pytest src/databricks_sqlalchemy/test_local
87+
# cd src/databricks_sqlalchemy
88+
# python -m pytest test_local --dburi "databricks://token:[email protected]?http_path=/sql/1.0/warehouses/dd43ee29fedd958d&catalog=peco&schema=default"
89+
# DBURI="databricks://token:${DATABRICKS_TOKEN}@${DATABRICKS_SERVER_HOSTNAME}?http_path=${DATABRICKS_HTTP_PATH}&catalog=${DATABRICKS_CATALOG}&schema=${DATABRICKS_SCHEMA}"
90+
# python -m pytest test_local --dburi "$DBURI"
91+
# poetry run python -m pytest test_local --dburi "databricks://token:${DATABRICKS_TOKEN}@${DATABRICKS_SERVER_HOSTNAME}?http_path=${DATABRICKS_HTTP_PATH}&catalog=${DATABRICKS_CATALOG}&schema=${DATABRICKS_SCHEMA}"
92+
93+
# python -m pytest src/databricks_sqlalchemy/test_local --dburi "databricks://token:$DATABRICKS_TOKEN@$DATABRICKS_SERVER_HOSTNAME?http_path=$DATABRICKS_HTTP_PATH&catalog=$DATABRICKS_CATALOG&schema=$DATABRICKS_SCHEMA"
6594

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,7 @@ pytest-dotenv = "^0.5.2"
4545
"Bug Tracker" = "https://github.com/databricks/databricks-sql-python/issues"
4646

4747
[tool.poetry.plugins."sqlalchemy.dialects"]
48-
"databricks" = "databricks.sqlalchemy:DatabricksDialect"
48+
"databricks" = "databricks_sqlalchemy:DatabricksDialect"
4949

5050
[build-system]
5151
requires = ["poetry-core>=1.0.0"]

src/databricks_sqlalchemy/_types.py

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -6,9 +6,8 @@
66
from sqlalchemy.engine.interfaces import Dialect
77
from sqlalchemy.ext.compiler import compiles
88

9-
# from databricks.sql.utils import ParamEscaper
10-
import databricks.sql
11-
from databricks.sql.utils import ParamEscaper
9+
from databricks_sql_connector_core.sql.utils import ParamEscaper
10+
1211

1312

1413
def process_literal_param_hack(value: Any):
@@ -112,7 +111,7 @@ class TIMESTAMP_NTZ(sqlalchemy.types.TypeDecorator):
112111
113112
Our dialect maps sqlalchemy.types.DateTime() to this type, which means that all DateTime()
114113
objects are stored without tzinfo. To read and write timezone-aware datetimes use
115-
databricks.sql.TIMESTAMP instead.
114+
databricks_sql_connector_core.sql.TIMESTAMP instead.
116115
117116
https://docs.databricks.com/en/sql/language-manual/data-types/timestamp-ntz-type.html
118117
"""

src/databricks_sqlalchemy/base.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
import databricks_sqlalchemy._ddl as dialect_ddl_impl
44
import databricks_sqlalchemy._types as dialect_type_impl
5-
from databricks import sql
5+
from databricks_sql_connector_core import sql
66
from databricks_sqlalchemy._parse import (
77
_describe_table_extended_result_to_dict_list,
88
_match_table_not_found_string,

0 commit comments

Comments
 (0)