Skip to content
This repository was archived by the owner on Dec 22, 2020. It is now read-only.

Commit 9e10bfb

Browse files
committed
allow subtables of composite key tables
1 parent 2d7950d commit 9e10bfb

File tree

2 files changed

+63
-50
lines changed

2 files changed

+63
-50
lines changed

lib/mosql/schema.rb

Lines changed: 62 additions & 49 deletions
Original file line numberDiff line numberDiff line change
@@ -54,15 +54,21 @@ def check_columns!(ns, spec)
5454
end
5555
end
5656

57-
def parse_spec(ns, spec, source=[])
57+
def parent_scope_column(parent, colname)
58+
(parent.to_s.singularize + "_" + colname.to_s).to_sym
59+
end
60+
61+
def parse_spec(ns, spec, source=[], parent_pks=[])
5862
out = spec.dup
5963
out[:columns] = to_array(spec.delete(:columns))
6064
meta = spec.delete(:meta)
65+
pks = parent_pks + primary_sql_keys_for_schema(out).map { |k| parent_scope_column(meta[:table], k) }
66+
6167
out[:subtables] = spec.map do |name, subspec|
6268
newsource = source + [name]
63-
subspec = parse_spec(ns , subspec, newsource)
69+
subspec = parse_spec(ns , subspec, newsource, pks)
6470
subspec[:meta][:source] = newsource
65-
subspec[:meta][:parent_fkey] = (meta[:table].to_s.singularize + "_id").to_sym
71+
subspec[:meta][:parent_fkeys] = pks
6672
subspec
6773
end
6874
check_columns!(ns, out)
@@ -103,12 +109,11 @@ def qualified_table_name(meta)
103109
end
104110
end
105111

106-
def create_table(db, spec, clobber, parent_table=nil, parent_pk_type = nil)
112+
def create_table(db, spec, clobber, parent_table=nil, parent_pks={})
107113
meta = spec[:meta]
108114
table_name = qualified_table_name(meta)
109115
composite_key = meta[:composite_key]
110-
keys = []
111-
keytypes = []
116+
primary_keys = {}
112117
log.info("Creating table #{db.literal(table_name)}...")
113118
db.drop_table?(table_name, :cascade => true) if clobber
114119
db.create_table(table_name) do
@@ -129,11 +134,9 @@ def create_table(db, spec, clobber, parent_table=nil, parent_pk_type = nil)
129134
column col[:name], col[:type], opts
130135

131136
if composite_key and composite_key.include?(col[:name])
132-
keys << col[:name].to_sym
133-
keytypes << col[:type]
137+
primary_keys[col[:name].to_sym] = col[:type]
134138
elsif not composite_key and col[:source].to_sym == :_id
135-
keys << col[:name].to_sym
136-
keytypes << col[:type]
139+
primary_keys[col[:name].to_sym] = col[:type]
137140
end
138141
end
139142

@@ -151,20 +154,20 @@ def create_table(db, spec, clobber, parent_table=nil, parent_pk_type = nil)
151154
end
152155

153156
if !parent_table.nil?
154-
foreign_key meta[:parent_fkey], parent_table, {
155-
:type => parent_pk_type,
157+
parent_pks.each do |k, type|
158+
column k, type
159+
end
160+
foreign_key parent_pks.keys, parent_table, {
156161
:on_delete => :cascade,
157162
:on_update => :cascade
158163
}
159-
keys << meta[:parent_fkey]
160-
keytypes << parent_pk_type
161164
end
162-
primary_key keys
165+
primary_key primary_keys.keys + parent_pks.keys
163166
end
164167

168+
parent_pks = Hash[primary_keys.map { |k, t| [parent_scope_column(meta[:table], k), t] }].merge(parent_pks)
165169
spec[:subtables].each do |subspec|
166-
raise "Too many keys for sub table in #{table_name}: #{keys}" unless keys.length == 1
167-
create_table(db, subspec, clobber, table_name, keytypes.first)
170+
create_table(db, subspec, clobber, table_name, parent_pks)
168171
end
169172
end
170173

@@ -276,17 +279,41 @@ def transform_primitive(v, type)
276279
end
277280
end
278281

279-
def transform_one(schema, obj)
282+
def transform_value(col, v)
283+
case v
284+
when Hash
285+
JSON.dump(v)
286+
when Array
287+
if col[:array_type]
288+
v = v.map { |it| transform_primitive(it, col[:array_type]) }
289+
Sequel.pg_array(v, col[:array_type])
290+
else
291+
JSON.dump(v)
292+
end
293+
else
294+
transform_primitive(v, col[:type])
295+
end
296+
end
297+
298+
def get_pks_for_debug(schema, obj, parent_pks={})
299+
pks = parent_pks.clone
300+
sql_pks = primary_sql_keys_for_schema(schema)
301+
schema[:columns].each do |col|
302+
break unless sql_pks.include?(col[:name])
303+
304+
pks[col[:name]] = bson_dig_dotted(obj, col[:source])
305+
end
306+
pks
307+
end
308+
309+
def transform_one(schema, obj, parent_pks={})
280310
original = obj
281311

282312
# Do a deep clone, because we're potentially going to be
283313
# mutating embedded objects.
284314
obj = BSON.deserialize(BSON.serialize(obj))
285315

286-
row = {}
287-
sql_pks = primary_sql_keys_for_schema(schema)
288-
pk_cols = schema[:columns].select{ |c| sql_pks.include?(c[:name]) }
289-
pks = Hash[pk_cols.map { |c| [c[:name], bson_dig_dotted(obj, c[:source])] }]
316+
row = parent_pks.clone
290317
schema[:columns].each do |col|
291318
source = col[:source]
292319
type = col[:type]
@@ -295,35 +322,22 @@ def transform_one(schema, obj)
295322
if source.start_with?("$")
296323
v = fetch_special_source(obj, source, original)
297324
else
298-
v = fetch_and_delete_dotted(obj, source)
299-
case v
300-
when Hash
301-
v = JSON.dump(v)
302-
when Array
303-
if col[:array_type]
304-
v = v.map { |it| transform_primitive(it, col[:array_type]) }
305-
v = Sequel.pg_array(v, col[:array_type])
306-
else
307-
v = JSON.dump(v)
308-
end
309-
else
310-
v = transform_primitive(v, type)
311-
end
325+
v = transform_value(col, fetch_and_delete_dotted(obj, source))
312326
end
313327

314328
null_allowed = !col[:notnull] or col.has_key?(:default)
315329
if v.nil? and not null_allowed
316-
raise "Invalid null #{source.inspect} for #{pks.inspect}"
330+
raise "Invalid null #{source.inspect} for #{get_pks_for_debug(schema, obj, parent_pks)}"
317331
elsif v.is_a? Sequel::SQL::Blob and type != "bytea"
318-
raise "Failed to convert binary #{source.inspect} to #{type.inspect} for #{pks.inspect}"
319-
elsif col[:array_type]
332+
raise "Failed to convert binary #{source.inspect} to #{type.inspect} for #{get_pks_for_debug(schema, obj, parent_pks)}"
333+
elsif col[:array_type] and not v.nil?
320334
v.each_with_index do |e, i|
321335
if not sanity_check_type(e, col[:array_type])
322-
raise "Failed to convert array element #{i} of #{source.inspect} to #{type.inspect}: got #{e.inspect} for #{pks.inspect}"
336+
raise "Failed to convert array element #{i} of #{source.inspect} to #{type.inspect}: got #{e.inspect} for #{get_pks_for_debug(schema, obj, parent_pks)}"
323337
end
324338
end
325-
elsif not sanity_check_type(v, type)
326-
raise "Failed to convert #{source.inspect} to #{type.inspect}: got #{v.inspect} for #{pks.inspect}"
339+
elsif not v.nil? and not sanity_check_type(v, type)
340+
raise "Failed to convert #{source.inspect} to #{type.inspect}: got #{v.inspect} for #{get_pks_for_debug(schema, obj, parent_pks)}"
327341
end
328342
row[name] = v
329343
end
@@ -380,11 +394,11 @@ def all_table_names_for_ns(ns)
380394

381395
def transform_one_ns(ns, obj)
382396
transform_one(find_ns!(ns), obj)
383-
384397
end
385398

386399
def save_all_pks_for_ns(ns, new, old)
387400
schema = find_ns!(ns)
401+
# We only save top level keys.
388402
primary_sql_keys = primary_sql_keys_for_schema(schema)
389403

390404
primary_sql_keys.each do |key|
@@ -413,17 +427,19 @@ def all_transforms_for_obj(schema, obj, parent_pks={}, &block)
413427

414428
# Make sure to add in the primary keys from any parent tables, since we
415429
# might not automatically have them.
416-
transformed = transform_one(schema, obj).update(parent_pks)
430+
transformed = transform_one(schema, obj, parent_pks)
417431

418432
yield table_ident, primary_keys, transformed
419433

434+
pks = Hash[primary_keys.map { |k| [
435+
parent_scope_column(schema[:meta][:table], k),
436+
transformed[k]
437+
] } ].update(parent_pks)
420438
schema[:subtables].each do |subspec|
421439
source = subspec[:meta][:source]
422440
subobjs = bson_dig(obj, *source)
423441
break if subobjs.nil?
424442

425-
raise "Too many primary keys" if primary_keys.length > 1
426-
pks = {subspec[:meta][:parent_fkey] => transformed[primary_keys[0]]}
427443
subobjs.each do |subobj|
428444
all_transforms_for_obj(subspec, subobj, pks, &block)
429445
end
@@ -452,9 +468,6 @@ def primary_sql_keys_for_schema(schema)
452468
else
453469
keys << schema[:columns].find {|c| c[:source] == '_id'}[:name]
454470
end
455-
if schema[:meta][:parent_fkey]
456-
keys << schema[:meta][:parent_fkey]
457-
end
458471

459472
return keys
460473
end

lib/mosql/streamer.rb

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -158,7 +158,7 @@ def import_collection(ns, collection, filter)
158158
if count % BATCH == 0
159159
sql_time += upsert_all_batches(batches, ns)
160160
elapsed = Time.now - start
161-
log.info("Imported #{count} rows into #{collection} (#{elapsed}s, #{sql_time}s SQL)...")
161+
log.info("Imported #{count} rows into #{ns} (#{elapsed}s, #{sql_time}s SQL)...")
162162
exit(0) if @done
163163
end
164164
end

0 commit comments

Comments (0)