dbkangaroo/kangaroo

Large file support

Opened this issue · 1 comment

Database files can easily exceed 1 GB, so Kangaroo should add large file support (see the streaming sketch after this list):

  • Import from large file
  • Export to large file
  • Open large SQL file
  • Save large SQL file
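One way to cover the "Open large SQL file" / "Import from large file" cases without holding the whole file in memory would be to stream it with GIO and execute it statement by statement. The following is only a minimal sketch under that assumption; import_large_sql and the execute_sql hook are hypothetical names, not existing Kangaroo APIs:

/*
 * Hypothetical streaming sketch: read a large .sql file line by line instead of
 * loading it whole, flushing each statement when a ';' terminator is seen.
 */
public void import_large_sql(File sql_file) throws Error
{
    var stream = new DataInputStream(sql_file.read());
    var statement = new StringBuilder();
    string? line;
    while ((line = stream.read_line(null)) != null) {
        statement.append(line);
        statement.append_c('\n');
        if (line.strip().has_suffix(";")) {
            // execute_sql(statement.str);   // hypothetical executor hook
            statement.truncate(0);
        }
    }
    // A trailing statement without a ';' would be flushed here as well
}

The same idea would apply in reverse for "Export to large file" / "Save large SQL file": write through a buffered output stream instead of building the whole dump in memory first.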
/*
 * Diff two meta model objects of the same type and record the version/difference info on the target object
 */
public DataVersion diff_model(MetaObject object_source, MetaObject object_target)
    requires(object_source.get_type() == object_target.get_type())
{
    Type member_type;
    string fullname;
    string prop_value;
    bool object_found = false;
    Value value_source, value_target;
    HashMap<string, string> dict_source;
    HashMap<string, string> dict_target;
    MetaObject meta_obj_source, meta_obj_target;
    ArrayList<string> list_text_source, list_text_target;
    ArrayList<MetaObject> list_meta_source, list_meta_target;
    DataVersion data_version = DataVersion.IDENTICAL;
    ObjectClass object_class = (ObjectClass)object_source.get_type().class_ref();
    object_target.clear_diff();
    // GLib.message("%s: %s => %s", object_source.character().to_string(), object_source.name, object_source.get_type().name());
    foreach (ParamSpec spec in object_class.list_properties()) {
        if ("version" == spec.name) {
            continue;
        }

        // GLib.message("%s -> %s", spec.name, spec.value_type.to_string());
        value_source = object_source.get_property(spec);
        value_target = object_target.get_property(spec);
        if (spec.value_type.is_object()) {
            if (spec.value_type.is_a(typeof(HashMap))) {            // Handle attributes
                dict_source = value_source.get_object() as HashMap<string, string>;
                dict_target = value_target.get_object() as HashMap<string, string>;
                foreach(string key in dict_source.keys) {
                    if (! dict_target.has_key(key)) {
                        fullname = "%s.%s".printf(spec.name, key);
                        object_target.append_diff(fullname, DataVersion.DELETE);
                        object_target.append_slice(fullname, dict_source[key]);
                    } else if (dict_source[key] != dict_target[key]) {
                        fullname = "%s.%s".printf(spec.name, key);
                        object_target.append_diff(fullname, DataVersion.UPDATE);
                        object_target.append_slice(fullname, dict_source[key]);
                    }
                }

                foreach(string key in dict_target.keys) {
                    if (! dict_source.has_key(key)) {
                        fullname = "%s.%s".printf(spec.name, key);
                        object_target.append_diff(fullname, DataVersion.INSERT);
                    }
                }
            } else if (spec.value_type.is_a(typeof(ArrayList))) {   // Handle child members
                member_type = (value_source.get_object() as Gee.Traversable).element_type;

                // Handle ArrayList<MetaObject>
                if (member_type.is_a(typeof(MetaObject))) {
                    list_meta_source = value_source.get_object() as ArrayList<MetaObject>;
                    list_meta_target = value_target.get_object() as ArrayList<MetaObject>;    
                    GLib.message("%s(%s): %p(%d) -> %p(%d)", spec.name, member_type.name(), list_meta_source, list_meta_source.size, list_meta_target, list_meta_target.size);
                    for(var i = 0; i < list_meta_source.size; i++) {
                        object_found = false;
                        for(var j = 0; j < list_meta_target.size; j++) {
                            if ((list_meta_source[i].schema == list_meta_target[j].schema) && (list_meta_source[i].name == list_meta_target[j].name)) {
                                GLib.message("%s.%s: %p(%s) -> %p(%s)", spec.name, list_meta_source[i].name, list_meta_source[i], list_meta_source[i].comment, list_meta_target[j], list_meta_target[j].comment);
                                if (DataVersion.UPDATE == this.diff_model(list_meta_source[i], list_meta_target[j])) {
                                    object_target.append_diff(spec.name + "." + list_meta_source[i].name, DataVersion.UPDATE);
                                }
                                object_found = true;
                                break;
                            }
                        }

                        if (! object_found) {
                            object_target.append_diff(spec.name + "." + list_meta_source[i].name, DataVersion.DELETE);
                        }
                    }

                    for(var j = 0; j < list_meta_target.size; j++) {
                        object_found = false;
                        for(var i = 0; i < list_meta_source.size; i++) {
                            if ((list_meta_source[i].schema == list_meta_target[j].schema) && (list_meta_source[i].name == list_meta_target[j].name)) {
                                object_found = true;
                                break;
                            }
                        }

                        if (! object_found) {
                            object_target.append_diff(spec.name + "." + list_meta_target[j].name, DataVersion.INSERT);
                        }
                    }    
                } else if (member_type.is_a(typeof(string))) {    // Handle ArrayList<string>
                    // GLib.message("ArrayList<string>: %s.%s -> %s", object_source.name, spec.name, spec.value_type.to_string());
                    list_text_source = value_source.get_object() as ArrayList<string>;
                    list_text_target = value_target.get_object() as ArrayList<string>;
                    foreach(string text_source in list_text_source) {
                        object_found = false;
                        foreach(string text_target in list_text_target) {
                            if (text_source == text_target) {
                                object_found = true;
                                break;
                            }
                        }

                        if (! object_found) {
                            object_target.append_diff(spec.name + "." + text_source, DataVersion.DELETE);
                        }
                    }

                    foreach(string text_target in list_text_target) {
                        object_found = false;
                        foreach(string text_source in list_text_source) {
                            if (text_target == text_source) {
                                object_found = true;
                                break;
                            }
                        }

                        if (! object_found) {
                            object_target.append_diff(spec.name + "." + text_target, DataVersion.INSERT);
                        }
                    }
                }
            } else if (spec.value_type.is_a(typeof(MetaObject))) {  // Handle a single nested MetaObject member
                meta_obj_source = value_source.get_object() as MetaObject;
                meta_obj_target = value_target.get_object() as MetaObject;
                if (DataVersion.UPDATE == this.diff_model(meta_obj_source, meta_obj_target)) {
                    object_target.append_diff(spec.name, DataVersion.UPDATE);
                }
            } else {                                                 // Other object types: delegate to diff_object()
                if (DataVersion.UPDATE == diff_object(value_source.get_object(), value_target.get_object())) {
                    object_target.append_diff(spec.name, DataVersion.UPDATE);
                }
            }
        } else if (0 != spec.values_cmp(value_source, value_target)) {  // Scalar property changed
            object_target.append_diff(spec.name, DataVersion.UPDATE);
            prop_value = value_source.strdup_contents();
            // strdup_contents() wraps string values in double quotes; strip them
            if (prop_value.has_prefix("\"") && prop_value.has_suffix("\"")) {
                prop_value = prop_value.substring(1, prop_value.length - 2);
            }
            object_target.append_slice(spec.name, prop_value);
        }
    }

    if (0 < object_target.diff().size) {
        data_version = DataVersion.UPDATE;
    }

    return data_version;
}
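For context, diff_model() itself only reports DataVersion.IDENTICAL or DataVersion.UPDATE to its caller; the detailed INSERT/UPDATE/DELETE entries are accumulated on the target object through append_diff() and append_slice(). A hedged usage sketch, in which load_catalog() and its arguments are hypothetical placeholders for however Kangaroo actually builds its MetaObject tree:

// Hypothetical usage; only diff_model() and DataVersion come from the code above.
MetaObject model_saved = load_catalog(project_file);      // placeholder loader
MetaObject model_live  = load_catalog(live_connection);   // placeholder loader
if (DataVersion.UPDATE == diff_model(model_saved, model_live)) {
    // model_live now carries the per-property change set recorded via append_diff()/append_slice()
}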