Skip to content
This repository was archived by the owner on Jan 22, 2025. It is now read-only.

Commit dec102e

Browse files
authored
Merge pull request #84 from sundy-li/uv
package to uv
2 parents 1bffb20 + c5c0976 commit dec102e

28 files changed

+1036
-364
lines changed

.github/workflows/ci.yaml

Lines changed: 11 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -9,39 +9,26 @@ on:
99
jobs:
1010
test:
1111
runs-on: ubuntu-latest
12-
services:
13-
databend:
14-
# image: datafuselabs/databend-query
15-
image: datafuselabs/databend
16-
env:
17-
QUERY_DEFAULT_USER: databend
18-
QUERY_DEFAULT_PASSWORD: databend
19-
MINIO_ENABLED: true
20-
ports:
21-
- 8000:8000
22-
- 9000:9000
12+
2313
steps:
2414
- name: Checkout
2515
uses: actions/checkout@v2
2616

27-
- name: Setup Python-3.10
28-
uses: actions/setup-python@v4
29-
with:
30-
python-version: '3.10'
17+
- name: Install uv
18+
uses: astral-sh/setup-uv@v4
3119

32-
- name: Pip Install
33-
run: |
34-
make install
20+
- name: Set up Python
21+
run: uv python install
3522

36-
- name: Verify Service Running
37-
run: |
38-
cid=$(docker ps -a | grep databend | cut -d' ' -f1)
39-
docker logs ${cid}
40-
curl -v http://localhost:8000/v1/health
23+
- name: Install the project
24+
run: uv sync --all-extras --dev
25+
26+
- name: Start databend-server
27+
run: make up
4128

4229
- name: Test
4330
env:
44-
TEST_DATABEND_DSN: "http://databend:databend@localhost:8000/default"
31+
TEST_DATABEND_DSN: "http://root:@localhost:8000/default"
4532
run: |
4633
make lint
4734
make ci

.github/workflows/release.yaml

Lines changed: 7 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -14,19 +14,14 @@ jobs:
1414
- name: Checkout Repository
1515
uses: actions/checkout@v3
1616

17-
- name: Set up Python
18-
uses: actions/setup-python@v3
19-
with:
20-
python-version: 3.9
17+
- name: Install uv
18+
uses: astral-sh/setup-uv@v4
2119

22-
- name: Install Dependencies
23-
run: |
24-
python -m pip install --upgrade pip
25-
pip install setuptools wheel twine
20+
- name: Set up Python
21+
run: uv python install
2622

27-
- name: Determine Version Change
28-
run: |
29-
export VERSION=$(cat databend_py/VERSION)
23+
- name: Install the project
24+
run: uv sync --all-extras --dev
3025

3126
- name: Release Package and Tag
3227
env:
@@ -38,7 +33,7 @@ jobs:
3833
git config user.email "hantmac@outlook.com"
3934
git tag -a "v$VERSION" -m "Release Version $VERSION"
4035
git push origin "v$VERSION"
41-
python setup.py sdist bdist_wheel
36+
uv publish
4237
echo "show user name:"
4338
echo ${{ secrets.TWINE_USERNAME }}
4439
twine upload -u ${{ secrets.TWINE_USERNAME }} -p ${{ secrets.TWINE_PASSWORD }} dist/*

.gitignore

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
upload.csv
12
.envrc
23
# Byte-compiled / optimized / DLL files
34
__pycache__/

Makefile

Lines changed: 12 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,18 @@
1+
prepare:
2+
mkdir -p data/databend
3+
4+
up: prepare
5+
docker compose -f docker-compose.yaml up --quiet-pull -d databend --wait
6+
curl -u root: -XPOST "http://localhost:8000/v1/query" -H 'Content-Type: application/json' -d '{"sql": "select version()", "pagination": { "wait_time_secs": 10}}'
7+
8+
start: up
9+
110
test:
2-
python tests/test_client.py
11+
uv run pytest .
312

413
ci:
5-
python tests/test_client.py
14+
uv run pytest .
615

716
lint:
8-
pyflakes .
17+
uv run ruff check
918

10-
install:
11-
pip install -r requirements.txt
12-
pip install -e .

databend_py/client.py

Lines changed: 82 additions & 59 deletions
Original file line numberDiff line numberDiff line change
@@ -15,14 +15,19 @@ class Client(object):
1515
"""
1616

1717
def __init__(self, *args, **kwargs):
18-
self.settings = (kwargs.pop('settings', None) or {}).copy()
19-
self.result_config = (kwargs.pop('result_config', None) or {}).copy()
18+
self.settings = (kwargs.pop("settings", None) or {}).copy()
19+
self.result_config = (kwargs.pop("result_config", None) or {}).copy()
2020
self.connection = Connection(*args, **kwargs)
2121
self.query_result_cls = QueryResult
2222
self.helper = Helper
23-
self._debug = asbool(self.settings.get('debug', False))
24-
self._uploader = DataUploader(self, self.connection, self.settings, debug=self._debug,
25-
compress=self.settings.get('compress', False))
23+
self._debug = asbool(self.settings.get("debug", False))
24+
self._uploader = DataUploader(
25+
self,
26+
self.connection,
27+
self.settings,
28+
debug=self._debug,
29+
compress=self.settings.get("compress", False),
30+
)
2631

2732
def __enter__(self):
2833
return self
@@ -34,9 +39,9 @@ def disconnect_connection(self):
3439
self.connection.disconnect()
3540

3641
def _data_generator(self, raw_data):
37-
while raw_data['next_uri'] is not None:
42+
while raw_data["next_uri"] is not None:
3843
try:
39-
raw_data = self._receive_data(raw_data['next_uri'])
44+
raw_data = self._receive_data(raw_data["next_uri"])
4045
yield raw_data
4146
except (Exception, KeyboardInterrupt):
4247
self.disconnect()
@@ -57,7 +62,8 @@ def _receive_result(self, query, query_id=None, with_column_types=False):
5762
helper.check_error()
5863
gen = self._data_generator(raw_data)
5964
result = self.query_result_cls(
60-
gen, raw_data, with_column_types=with_column_types, **self.result_config)
65+
gen, raw_data, with_column_types=with_column_types, **self.result_config
66+
)
6167
return result.get_result()
6268

6369
def _iter_receive_result(self, query, query_id=None, with_column_types=False):
@@ -67,14 +73,16 @@ def _iter_receive_result(self, query, query_id=None, with_column_types=False):
6773
helper.check_error()
6874
gen = self._data_generator(raw_data)
6975
result = self.query_result_cls(
70-
gen, raw_data, with_column_types=with_column_types, **self.result_config)
76+
gen, raw_data, with_column_types=with_column_types, **self.result_config
77+
)
7178
_, rows = result.get_result()
7279
for row in rows:
7380
for r in row:
7481
yield r
7582

76-
def execute(self, query, params=None, with_column_types=False,
77-
query_id=None, settings=None):
83+
def execute(
84+
self, query, params=None, with_column_types=False, query_id=None, settings=None
85+
):
7886
"""
7987
Executes query.
8088
:param query: query that will be send to server.
@@ -112,52 +120,63 @@ def execute(self, query, params=None, with_column_types=False,
112120
return [], rv
113121

114122
column_types, rv = self._process_ordinary_query(
115-
query, params=params, with_column_types=with_column_types,
116-
query_id=query_id)
123+
query, params=params, with_column_types=with_column_types, query_id=query_id
124+
)
117125
return column_types, rv
118126

119127
# params = [(1,),(2,)] or params = [(1,2),(2,3)]
120128
def _process_insert_query(self, query, params):
121129
insert_rows = 0
122130
if "values" in query:
123-
query = query.split("values")[0] + 'values'
131+
query = query.split("values")[0] + "values"
124132
elif "VALUES" in query:
125-
query = query.split("VALUES")[0] + 'VALUES'
126-
if len(query.split(' ')) < 3:
133+
query = query.split("VALUES")[0] + "VALUES"
134+
if len(query.split(" ")) < 3:
127135
raise Exception("Not standard insert/replace statement")
128-
table_name = query.split(' ')[2]
129-
batch_size = query.count(',') + 1
136+
table_name = query.split(" ")[2]
137+
batch_size = query.count(",") + 1
130138
if params is not None and len(params) > 0:
131139
if isinstance(params[0], tuple):
132140
tuple_ls = params
133141
else:
134-
tuple_ls = [tuple(params[i:i + batch_size]) for i in range(0, len(params), batch_size)]
142+
tuple_ls = [
143+
tuple(params[i : i + batch_size])
144+
for i in range(0, len(params), batch_size)
145+
]
135146
insert_rows = len(tuple_ls)
136147
self._uploader.upload_to_table_by_copy(table_name, tuple_ls)
137148
return insert_rows
138149

139-
def _process_ordinary_query(self, query, params=None, with_column_types=False,
140-
query_id=None):
150+
def _process_ordinary_query(
151+
self, query, params=None, with_column_types=False, query_id=None
152+
):
141153
if params is not None:
142-
query = self._substitute_params(
143-
query, params, self.connection.context
144-
)
145-
return self._receive_result(query, query_id=query_id, with_column_types=with_column_types, )
146-
147-
def execute_iter(self, query, params=None, with_column_types=False,
148-
query_id=None, settings=None):
154+
query = self._substitute_params(query, params, self.connection.context)
155+
return self._receive_result(
156+
query,
157+
query_id=query_id,
158+
with_column_types=with_column_types,
159+
)
160+
161+
def execute_iter(
162+
self, query, params=None, with_column_types=False, query_id=None, settings=None
163+
):
149164
if params is not None:
150-
query = self._substitute_params(
151-
query, params, self.connection.context
152-
)
153-
return self._iter_receive_result(query, query_id=query_id, with_column_types=with_column_types)
154-
155-
def _iter_process_ordinary_query(self, query, with_column_types=False, query_id=None):
156-
return self._iter_receive_result(query, query_id=query_id, with_column_types=with_column_types)
165+
query = self._substitute_params(query, params, self.connection.context)
166+
return self._iter_receive_result(
167+
query, query_id=query_id, with_column_types=with_column_types
168+
)
169+
170+
def _iter_process_ordinary_query(
171+
self, query, with_column_types=False, query_id=None
172+
):
173+
return self._iter_receive_result(
174+
query, query_id=query_id, with_column_types=with_column_types
175+
)
157176

158177
def _substitute_params(self, query, params, context):
159178
if not isinstance(params, dict):
160-
raise ValueError('Parameters are expected in dict form')
179+
raise ValueError("Parameters are expected in dict form")
161180

162181
escaped = escape_params(params, context)
163182
return query % escaped
@@ -186,59 +205,59 @@ def from_url(cls, url):
186205
continue
187206

188207
timeouts = {
189-
'connect_timeout',
190-
'read_timeout',
191-
'send_receive_timeout',
192-
'sync_request_timeout'
208+
"connect_timeout",
209+
"read_timeout",
210+
"send_receive_timeout",
211+
"sync_request_timeout",
193212
}
194213

195214
value = value[0]
196215

197-
if name == 'client_name':
216+
if name == "client_name":
198217
kwargs[name] = value
199-
elif name == 'tenant':
218+
elif name == "tenant":
200219
kwargs[name] = value
201-
elif name == 'warehouse':
220+
elif name == "warehouse":
202221
kwargs[name] = value
203-
elif name == 'secure':
222+
elif name == "secure":
204223
kwargs[name] = asbool(value)
205-
elif name == 'copy_purge':
224+
elif name == "copy_purge":
206225
kwargs[name] = asbool(value)
207226
settings[name] = asbool(value)
208-
elif name == 'debug':
227+
elif name == "debug":
209228
settings[name] = asbool(value)
210-
elif name == 'compress':
229+
elif name == "compress":
211230
settings[name] = asbool(value)
212231
elif name in timeouts:
213232
kwargs[name] = float(value)
214-
elif name == 'persist_cookies':
233+
elif name == "persist_cookies":
215234
kwargs[name] = asbool(value)
216-
elif name == 'null_to_none':
235+
elif name == "null_to_none":
217236
result_config[name] = asbool(value)
218237
else:
219238
settings[name] = value # settings={'copy_purge':False}
220239
secure = kwargs.get("secure", False)
221-
kwargs['secure'] = secure
240+
kwargs["secure"] = secure
222241

223242
host = parsed_url.hostname
224243

225244
if parsed_url.port is not None:
226-
kwargs['port'] = parsed_url.port
245+
kwargs["port"] = parsed_url.port
227246

228-
path = parsed_url.path.replace('/', '', 1)
247+
path = parsed_url.path.replace("/", "", 1)
229248
if path:
230-
kwargs['database'] = path
249+
kwargs["database"] = path
231250

232251
if parsed_url.username is not None:
233-
kwargs['user'] = unquote(parsed_url.username)
252+
kwargs["user"] = unquote(parsed_url.username)
234253

235254
if parsed_url.password is not None:
236-
kwargs['password'] = unquote(parsed_url.password)
255+
kwargs["password"] = unquote(parsed_url.password)
237256

238257
if settings:
239-
kwargs['settings'] = settings
258+
kwargs["settings"] = settings
240259
if result_config:
241-
kwargs['result_config'] = result_config
260+
kwargs["result_config"] = result_config
242261

243262
return cls(host, **kwargs)
244263

@@ -250,7 +269,9 @@ def insert(self, database_name, table_name, data):
250269
data: the data which write into, it's a list of tuple
251270
"""
252271
# TODO: escape the database & table name
253-
self._uploader.upload_to_table_by_copy("%s.%s" % (database_name, table_name), data)
272+
self._uploader.upload_to_table_by_copy(
273+
"%s.%s" % (database_name, table_name), data
274+
)
254275

255276
def replace(self, database_name, table_name, conflict_keys, data):
256277
"""
@@ -260,7 +281,9 @@ def replace(self, database_name, table_name, conflict_keys, data):
260281
conflict_keys: the key that use to replace into
261282
data: the data which write into, it's a list of tuple
262283
"""
263-
self._uploader.replace_into_table("%s.%s" % (database_name, table_name), conflict_keys, data)
284+
self._uploader.replace_into_table(
285+
"%s.%s" % (database_name, table_name), conflict_keys, data
286+
)
264287

265288
def upload_to_stage(self, stage_dir, file_name, data):
266289
"""

0 commit comments

Comments (0)