import json

from db import connection as db_conn
from db.columns import _transform_column_alter_dict
from db.deprecated.types.base import PostgresType


def _json_or_none(value):
    return json.dumps(value) if value is not None else None


def get_table(table, conn):
    """
    Return a dictionary describing a table of a schema.

    The `table` can be given as either a "qualified name", or an OID.
    The OID is the preferred identifier, since it's much more robust.

    Args:
        table: The table for which we want table info.
    """
    return db_conn.exec_msar_func(conn, 'get_table', table).fetchone()[0]


def get_table_info(schema, conn):
    """
    Return a list of dictionaries describing the tables of a schema.

    The `schema` can be given as either a "qualified name", or an OID.
    The OID is the preferred identifier, since it's much more robust.

    Args:
        schema: The schema for which we want table info.
    """
    return db_conn.exec_msar_func(conn, 'get_table_info', schema).fetchone()[0]
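
# A minimal usage sketch for `get_table_info` (hypothetical schema OID; `conn`
# is assumed to be an open connection accepted by `db_conn.exec_msar_func`):
#
#     tables = get_table_info(schema=2200, conn=conn)
#     for table in tables:
#         print(table)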


def list_joinable_tables(table_oid, conn, max_depth):
    """
    List the tables that can be joined to a given table via foreign keys.

    Args:
        table_oid: The OID of the table whose joinable tables we want.
        max_depth: The maximum number of foreign key hops to follow.
    """
    return db_conn.exec_msar_func(conn, 'get_joinable_tables', max_depth, table_oid).fetchone()[0]


def get_preview(table_oid, column_list, conn, limit=20):
    """
    Preview an imported table, returning records from the specified columns.

    Note that these casts are temporary and do not alter the data in the
    underlying table. To alter these settings permanently for the columns,
    see `tables/alter.py`.

    Args:
        table_oid: Identity of the imported table in the user's database.
        column_list: List of settings describing the casts to be applied to
            the columns.
        limit: The upper limit for the number of records to return.
    """
    transformed_column_data = [_transform_column_alter_dict(col) for col in column_list]
    return db_conn.exec_msar_func(
        conn, 'get_preview', table_oid, json.dumps(transformed_column_data), limit
    ).fetchone()[0]
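
# A minimal usage sketch for `get_preview` (hypothetical OID, attnum, and type
# values; the accepted keys of each column-settings dict are defined by
# `_transform_column_alter_dict`, so the ones below are assumptions):
#
#     preview = get_preview(
#         table_oid=12345,
#         column_list=[{"id": 2, "type": {"name": "numeric"}}],
#         conn=conn,
#         limit=10,
#     )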


def alter_table_on_database(table_oid, table_data_dict, conn):
    """
    Alter the name, description, or columns of a table, returning the name of
    the altered table.

    Args:
        table_oid: The OID of the table to be altered.
        table_data_dict: A dict describing the alterations to make.

    table_data_dict should have the form:
    {
        "name": <str>,
        "description": <str>,
        "columns": <list> of column_data describing columns to alter.
    }
    """
    return db_conn.exec_msar_func(
        conn, 'alter_table', table_oid, json.dumps(table_data_dict)
    ).fetchone()[0]
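
# A minimal usage sketch for `alter_table_on_database` (hypothetical OID and
# values; renames the table and updates its description without touching any
# columns):
#
#     new_name = alter_table_on_database(
#         table_oid=12345,
#         table_data_dict={"name": "orders_2024", "description": "Renamed."},
#         conn=conn,
#     )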


def create_table_on_database(
    table_name,
    schema_oid,
    conn,
    column_data_list=[],
    constraint_data_list=[],
    owner_oid=None,
    comment=None
):
    """
    Create a table with a default id column.

    Args:
        table_name: Name of the table to be created.
        schema_oid: The OID of the schema where the table will be created.
        column_data_list: The column dicts for the new table, in order. (optional)
        constraint_data_list: The constraint dicts for the new table. (optional)
        owner_oid: The OID of the role who will own the new table. (optional)
        comment: The comment for the new table. (optional)

    Returns:
        The OID and name of the created table.
    """
    return db_conn.exec_msar_func(
        conn,
        'add_mathesar_table',
        schema_oid,
        table_name,
        json.dumps(column_data_list),
        json.dumps(constraint_data_list),
        owner_oid,
        comment
    ).fetchone()[0]
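
# A minimal usage sketch for `create_table_on_database` (hypothetical names
# and OIDs; the accepted keys of the column and constraint dicts, and the
# exact shape of the returned value, are defined by the `add_mathesar_table`
# SQL function, so the ones below are assumptions):
#
#     result = create_table_on_database(
#         table_name="orders",
#         schema_oid=2200,
#         conn=conn,
#         column_data_list=[{"name": "total", "type": {"name": "numeric"}}],
#         comment="Customer orders.",
#     )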


def prepare_table_for_import(
    table_name,
    schema_oid,
    column_names,
    header,
    conn,
    delimiter=None,
    escapechar=None,
    quotechar=None,
    encoding=None,
    comment=None
):
    """
    Create a Postgres table in the specified schema, with all columns of
    type `text`.

    Returns the copy_sql, table_oid, and table_name for carrying out the
    import into the created table.
    """
    column_data_list = [
        {
            "name": column_name,
            "type": {"name": PostgresType.TEXT.id}
        } for column_name in column_names
    ]
    import_info = db_conn.exec_msar_func(
        conn,
        'prepare_table_for_import',
        schema_oid,
        table_name,
        json.dumps(column_data_list),
        header,
        delimiter,
        escapechar,
        quotechar,
        encoding,
        comment
    ).fetchone()[0]
    return (
        import_info['copy_sql'],
        import_info['table_oid'],
        import_info['table_name']
    )
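
# A minimal usage sketch for `prepare_table_for_import` (hypothetical names
# and OID; assumes `conn` is a psycopg connection and that `copy_sql` is a
# COPY ... FROM STDIN statement suitable for psycopg's cursor.copy()):
#
#     copy_sql, table_oid, table_name = prepare_table_for_import(
#         "orders", 2200, ["id", "total"], header=True, conn=conn
#     )
#     with open("orders.csv") as f, conn.cursor() as cur:
#         with cur.copy(copy_sql) as copy:
#             while data := f.read(8192):
#                 copy.write(data)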


def drop_table_from_database(table_oid, conn, cascade=False):
    """
    Drop a table.

    Args:
        table_oid: OID of the table to drop.
        cascade: Whether to also drop dependent objects.

    Returns:
        The fully qualified name of the dropped table.
    """
    return db_conn.exec_msar_func(
        conn, 'drop_table', table_oid, cascade
    ).fetchone()[0]


def infer_table_column_data_types(conn, table_oid):
    """
    Infer the best type for each column in the table.

    Currently we only suggest different types for columns which originate
    as type `text`. Restricted to columns to which the user has access.

    Args:
        table_oid: The OID of the table whose columns we're inferring types for.

    Returns:
        A JSON object with attnum keys, whose values are the result of
        `format_type` for the inferred type of each column.
    """
    return db_conn.exec_msar_func(
        conn, 'infer_table_column_data_types', table_oid
    ).fetchone()[0]
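
# A minimal usage sketch for `infer_table_column_data_types` (hypothetical OID
# and output; the real return value is produced by the
# `infer_table_column_data_types` SQL function):
#
#     inferred = infer_table_column_data_types(conn, table_oid=12345)
#     # e.g. {"1": "integer", "2": "numeric", "3": "text"}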


def move_columns_to_referenced_table(
    conn, source_table_oid, target_table_oid, move_column_attnums
):
    """
    Move the given columns from the source table to the table it references.

    Args:
        source_table_oid: The OID of the table the columns are moved from.
        target_table_oid: The OID of the referenced table the columns are
            moved to.
        move_column_attnums: The attnums of the columns to move.
    """
    db_conn.exec_msar_func(
        conn,
        'move_columns_to_referenced_table',
        source_table_oid,
        target_table_oid,
        move_column_attnums
    )


def split_table(
    conn,
    old_table_oid,
    extracted_column_attnums,
    extracted_table_name,
    relationship_fk_column_name=None
):
    """
    Extract columns from a table into a new table, linked by a foreign key.

    Args:
        old_table_oid: The OID of the table to extract columns from.
        extracted_column_attnums: The attnums of the columns to extract.
        extracted_table_name: The name of the new table.
        relationship_fk_column_name: The name to use for the linking foreign
            key column. (optional)

    Returns:
        A dict with the OID of the extracted table and the attnum of the new
        foreign key column on the old table.
    """
    extracted_table_oid, new_fkey_attnum = db_conn.exec_msar_func(
        conn,
        'extract_columns_from_table',
        old_table_oid,
        extracted_column_attnums,
        extracted_table_name,
        relationship_fk_column_name
    ).fetchone()[0]
    return {
        'extracted_table_oid': extracted_table_oid,
        'new_fkey_attnum': new_fkey_attnum
    }
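
# A minimal usage sketch for `split_table` (hypothetical OID, attnums, and
# names):
#
#     result = split_table(
#         conn,
#         old_table_oid=12345,
#         extracted_column_attnums=[3, 4],
#         extracted_table_name="addresses",
#     )
#     new_table_oid = result["extracted_table_oid"]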


def fetch_table_in_chunks(
    conn,
    table_oid,
    limit=None,
    offset=None,
    order=None,
    filter=None,
    with_column_header=True,
    batch_size=2000
):
    """
    Stream the records of a table in batches, using a server-side cursor.

    If `with_column_header` is True, the first item yielded describes the
    table's columns; each subsequent item is a list of up to `batch_size`
    records.
    """
    with conn.transaction():
        with db_conn.exec_msar_func_server_cursor(
            conn,
            'get_table_columns_and_records',
            table_oid,
            limit,
            offset,
            _json_or_none(order),
            _json_or_none(filter),
        ) as server_cursor:
            if with_column_header:
                columns = server_cursor.fetchone()[0]
                yield columns
            while True:
                records = server_cursor.fetchmany(batch_size)
                if not records:
                    break
                yield [record[0] for record in records]
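
# A minimal usage sketch for `fetch_table_in_chunks` (hypothetical OID; the
# shapes of `order` and `filter` are defined by the
# `get_table_columns_and_records` SQL function, so none are passed here):
#
#     chunks = fetch_table_in_chunks(conn, table_oid=12345, limit=10000)
#     columns = next(chunks)
#     for batch in chunks:
#         process(batch)  # `process` is a placeholder for caller logic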