1
1
from contextlib import contextmanager
2
2
3
-
4
3
from . import settings
5
4
from . import pql_ast as ast
6
5
from . import pql_objects as objects
7
6
from .utils import classify
8
7
from .interpreter import Interpreter
9
- from .evaluate import cast_to_python
10
- from .interp_common import create_engine , call_pql_func
8
+ from .sql_interface import create_engine
11
9
from .pql_types import T
12
- from .pql_functions import import_pandas
13
- from .context import context
14
- from . import sql
15
10
16
11
from . import display
17
12
display .install_reprs ()
18
13
19
14
20
- def _call_pql_func (state , name , args ):
21
- with context (state = state ):
22
- count = call_pql_func (state , name , args )
23
- return cast_to_python (state , count )
24
-
25
-
26
15
class TablePromise :
27
16
"""Returned by Preql whenever the result is a table
28
17
29
18
Fetching values creates queries to database engine
30
19
"""
31
20
32
- def __init__ (self , state , inst ):
33
- self ._state = state
21
+ def __init__ (self , interp , inst ):
22
+ self ._interp = interp
34
23
self ._inst = inst
35
24
self ._rows = None
36
25
37
26
def to_json (self ):
38
27
"Returns table as a list of rows, i.e. ``[{col1: value, col2: value, ...}, ...]``"
39
28
if self ._rows is None :
40
- self ._rows = cast_to_python ( self ._state , self ._inst )
29
+ self ._rows = self ._interp . cast_to_python ( self ._inst )
41
30
assert self ._rows is not None
42
31
return self ._rows
43
32
@@ -55,30 +44,30 @@ def __eq__(self, other):
55
44
56
45
def __len__ (self ):
57
46
"Run a count query on table"
58
- return _call_pql_func (self ._state , 'count' , [self ._inst ])
47
+ count = self ._interp .call_builtin_func ('count' , [self ._inst ])
48
+ return self ._interp .cast_to_python (count )
59
49
60
50
def __iter__ (self ):
61
51
return iter (self .to_json ())
62
52
63
53
def __getitem__ (self , index ):
64
54
"Run a slice query on table"
65
- with context (state = self ._state ):
66
- if isinstance (index , slice ):
67
- offset = index .start or 0
68
- limit = index .stop - offset
69
- return call_pql_func (self ._state , 'limit_offset' , [self ._inst , ast .make_const (limit ), ast .make_const (offset )])
55
+ if isinstance (index , slice ):
56
+ offset = index .start or 0
57
+ limit = index .stop - offset
58
+ return self ._interp .call_builtin_func ('limit_offset' , [self ._inst , ast .make_const (limit ), ast .make_const (offset )])
70
59
71
- # TODO different debug log level / mode
72
- res ,= cast_to_python ( self ._state , self [index :index + 1 ])
73
- return res
60
+ # TODO different debug log level / mode
61
+ res ,= self ._interp . cast_to_python ( self [index :index + 1 ])
62
+ return res
74
63
75
64
def __repr__ (self ):
76
65
return repr (self .to_json ())
77
66
78
67
79
68
def _prepare_instance_for_user(interp, inst):
    """Convert an interpreter result into a user-facing value.

    Table instances become lazy ``TablePromise`` objects; anything else
    is localized into a plain Python value.
    """
    if inst.type <= T.table:
        return TablePromise(interp, inst)
    return interp.localize_obj(inst)
84
73
@@ -104,6 +93,7 @@ def __init__(self, db_uri: str='sqlite://:memory:', print_sql: bool=settings.pri
104
93
"""
105
94
self ._db_uri = db_uri
106
95
self ._print_sql = print_sql
96
+ self ._auto_create = auto_create
107
97
# self.engine.ping()
108
98
109
99
engine = create_engine (self ._db_uri , print_sql = self ._print_sql , auto_create = auto_create )
@@ -182,41 +172,18 @@ def commit(self):
182
172
def rollback (self ):
183
173
return self .interp .state .db .rollback ()
184
174
185
- def _drop_tables (self , * tables ):
186
- state = self .interp .state
187
- # XXX temporary. Used for testing
188
- for t in tables :
189
- t = sql ._quote (state .db .target , state .db .qualified_name (t ))
190
- state .db ._execute_sql (T .nulltype , f"DROP TABLE { t } ;" , state )
191
175
192
176
def import_pandas (self , ** dfs ):
193
177
"""Import pandas.DataFrame instances into SQL tables
194
178
195
179
Example:
196
180
>>> pql.import_pandas(a=df_a, b=df_b)
197
181
"""
198
- with self .interp .setup_context ():
199
- return list (import_pandas (self .interp .state , dfs ))
182
+ return self .interp .import_pandas (dfs )
200
183
201
184
202
185
def load_all_tables (self ):
203
- table_types = self .interp .state .db .import_table_types (self .interp .state )
204
- table_types_by_schema = classify (table_types , lambda x : x [0 ], lambda x : x [1 :])
205
-
206
- for schema_name , table_types in table_types_by_schema .items ():
207
- if schema_name :
208
- schema = objects .Module (schema_name , {})
209
- self .interp .set_var (schema_name , schema )
210
-
211
- for table_name , table_type in table_types :
212
- db_name = table_type .options ['name' ]
213
- inst = objects .new_table (table_type , db_name )
214
-
215
- if schema_name :
216
- schema .namespace [table_name ] = inst
217
- else :
218
- if not self .interp .has_var (table_name ):
219
- self .interp .set_var (table_name , inst )
186
+ return self .interp .load_all_tables ()
220
187
221
188
222
189
0 commit comments