1
use canyon_connection::{datasources::Migrations as MigrationsStatus, DATASOURCES};
2
use canyon_crud::rows::CanyonRows;
3
use canyon_entities::CANYON_REGISTER_ENTITIES;
4
use partialdebug::placeholder::PartialDebug;
5

            
6
use crate::{
7
    canyon_crud::{
8
        bounds::{Column, Row, RowOperations},
9
        crud::Transaction,
10
        DatabaseType,
11
    },
12
    constants,
13
    migrations::{
14
        information_schema::{ColumnMetadata, ColumnMetadataTypeValue, TableMetadata},
15
        memory::CanyonMemory,
16
        processor::MigrationsProcessor,
17
    },
18
};
19

            
20
#[derive(PartialDebug)]
21
pub struct Migrations;
22
// Makes this structure able to make queries to the database
23
impl Transaction<Self> for Migrations {}
24

            
25
impl Migrations {
26
    /// Launches the mechanism to parse the Database schema, the Canyon register
27
    /// and the database table with the memory of Canyon to perform the
28
    /// migrations over the targeted database
29
    pub async fn migrate() {
30
        for datasource in DATASOURCES.iter() {
31
            if datasource
32
                .properties
33
                .migrations
34
                .filter(|status| !status.eq(&MigrationsStatus::Disabled))
35
                .is_none()
36
            {
37
                println!(
38
                    "Skipped datasource: {:?} for being disabled (or not configured)",
39
                    datasource.name
40
                );
41
                continue;
42
            }
43
            println!(
44
                "Processing migrations for datasource: {:?}",
45
                datasource.name
46
            );
47

            
48
            let mut migrations_processor = MigrationsProcessor::default();
49

            
50
            let canyon_entities = CANYON_REGISTER_ENTITIES.lock().unwrap().to_vec();
51
            let canyon_memory = CanyonMemory::remember(datasource, &canyon_entities).await;
52

            
53
            // Tracked entities that must be migrated whenever Canyon starts
54
            let schema_status =
55
                Self::fetch_database(&datasource.name, datasource.get_db_type()).await;
56
            let database_tables_schema_info =
57
                Self::map_rows(schema_status, datasource.get_db_type());
58

            
59
            // We filter the tables from the schema that aren't Canyon entities
60
            let mut user_database_tables = vec![];
61
            for parsed_table in database_tables_schema_info.iter() {
62
                if canyon_memory
63
                    .memory
64
                    .iter()
65
                    .any(|f| f.declared_table_name.eq(&parsed_table.table_name))
66
                    || canyon_memory
67
                        .renamed_entities
68
                        .values()
69
                        .any(|f| *f == parsed_table.table_name)
70
                {
71
                    user_database_tables.append(&mut vec![parsed_table]);
72
                }
73
            }
74

            
75
            migrations_processor
76
                .process(
77
                    canyon_memory,
78
                    canyon_entities,
79
                    user_database_tables,
80
                    datasource,
81
                )
82
                .await;
83
        }
84
    }
85

            
86
    /// Fetches a concrete schema metadata by target the database
87
    /// chosen by it's datasource name property
88
    async fn fetch_database(
89
        datasource_name: &str,
90
        db_type: DatabaseType,
91
    ) -> CanyonRows<Migrations> {
92
        let query = match db_type {
93
            #[cfg(feature = "postgres")]
94
            DatabaseType::PostgreSql => constants::postgresql_queries::FETCH_PUBLIC_SCHEMA,
95
            #[cfg(feature = "mssql")]
96
            DatabaseType::SqlServer => constants::mssql_queries::FETCH_PUBLIC_SCHEMA,
97
            #[cfg(feature = "mysql")]
98
            DatabaseType::MySQL => todo!("Not implemented fetch database in mysql"),
99
        };
100

            
101
        Self::query(query, [], datasource_name)
102
            .await
103
            .unwrap_or_else(|_| {
104
                panic!(
105
                    "Error querying the schema information for the datasource: {datasource_name}"
106
                )
107
            })
108
    }
109

            
110
    /// Handler for parse the result of query the information of some database schema,
111
    /// and extract the content of the returned rows into custom structures with
112
    /// the data well organized for every entity present on that schema
113
    fn map_rows(db_results: CanyonRows<Migrations>, db_type: DatabaseType) -> Vec<TableMetadata> {
114
        match db_results {
115
            #[cfg(feature = "postgres")]
116
            CanyonRows::Postgres(v) => Self::process_tp_rows(v, db_type),
117
            #[cfg(feature = "mssql")]
118
            CanyonRows::Tiberius(v) => Self::process_tib_rows(v, db_type),
119
            _ => panic!(),
120
        }
121
    }
122

            
123
    /// Parses all the [`Row`] after query the information of the targeted schema,
124
    /// grouping them in [`TableMetadata`] structs, by relating every [`Row`] that has
125
    /// the same "table_name" (asked with column.name()) being one field of the new
126
    /// [`TableMetadata`], and parsing the other columns that belongs to that entity
127
    /// and appending as a new [`ColumnMetadata`] element to the columns field.
128
    fn get_columns_metadata(res_row: &dyn Row, table: &mut TableMetadata) {
129
        let mut entity_column = ColumnMetadata::default();
130
        for column in res_row.columns().iter() {
131
            if column.name() != "table_name" {
132
                Self::set_column_metadata(res_row, column, &mut entity_column);
133
            } // Discards the column "table_name", 'cause is already a field of [`TableMetadata`]
134
        }
135
        table.columns.push(entity_column);
136
    }
137

            
138
    /// Sets the concrete value for a field of a [`ColumnMetadata`], by reading the properties
139
    /// of the source [`Column`], filtering the target value by the source property `column name`
140
    fn set_column_metadata(row: &dyn Row, src: &Column, dest: &mut ColumnMetadata) {
141
        let column_identifier = src.name();
142
        let column_value = ColumnMetadataTypeValue::get_value(row, src);
143

            
144
        if column_identifier == "column_name" {
145
            if let ColumnMetadataTypeValue::StringValue(value) = &column_value {
146
                dest.column_name = value
147
                    .to_owned()
148
                    .expect("[MIGRATIONS - set_column_metadata -> column_name]")
149
            }
150
        } else if column_identifier == "data_type" {
151
            if let ColumnMetadataTypeValue::StringValue(value) = &column_value {
152
                dest.datatype = value
153
                    .to_owned()
154
                    .expect("[MIGRATIONS - set_column_metadata -> data_type]")
155
            }
156
        } else if column_identifier == "character_maximum_length" {
157
            if let ColumnMetadataTypeValue::IntValue(value) = &column_value {
158
                dest.character_maximum_length = value.to_owned()
159
            }
160
        } else if column_identifier == "is_nullable" {
161
            if let ColumnMetadataTypeValue::StringValue(value) = &column_value {
162
                dest.is_nullable = matches!(
163
                    value
164
                        .as_ref()
165
                        .expect("[MIGRATIONS - set_column_metadata -> is_nullable]")
166
                        .as_str(),
167
                    "YES"
168
                )
169
            }
170
        } else if column_identifier == "column_default" {
171
            if let ColumnMetadataTypeValue::StringValue(value) = &column_value {
172
                dest.column_default = value.to_owned()
173
            }
174
        } else if column_identifier == "foreign_key_info" {
175
            if let ColumnMetadataTypeValue::StringValue(value) = &column_value {
176
                dest.foreign_key_info = value.to_owned()
177
            }
178
        } else if column_identifier == "foreign_key_name" {
179
            if let ColumnMetadataTypeValue::StringValue(value) = &column_value {
180
                dest.foreign_key_name = value.to_owned()
181
            }
182
        } else if column_identifier == "primary_key_info" {
183
            if let ColumnMetadataTypeValue::StringValue(value) = &column_value {
184
                dest.primary_key_info = value.to_owned()
185
            }
186
        } else if column_identifier == "primary_key_name" {
187
            if let ColumnMetadataTypeValue::StringValue(value) = &column_value {
188
                dest.primary_key_name = value.to_owned()
189
            }
190
        } else if column_identifier == "is_identity" {
191
            if let ColumnMetadataTypeValue::StringValue(value) = &column_value {
192
                dest.is_identity = matches!(
193
                    value
194
                        .as_ref()
195
                        .expect("[MIGRATIONS - set_column_metadata -> is_identity]")
196
                        .as_str(),
197
                    "YES"
198
                )
199
            }
200
        } else if column_identifier == "identity_generation" {
201
            if let ColumnMetadataTypeValue::StringValue(value) = &column_value {
202
                dest.identity_generation = value.to_owned()
203
            }
204
        };
205
    }
206

            
207
    #[cfg(feature = "postgres")]
208
    fn process_tp_rows(
209
        db_results: Vec<tokio_postgres::Row>,
210
        db_type: DatabaseType,
211
    ) -> Vec<TableMetadata> {
212
        let mut schema_info: Vec<TableMetadata> = Vec::new();
213
        for res_row in db_results.iter() {
214
            let unique_table = schema_info
215
                .iter_mut()
216
                .find(|table| check_for_table_name(table, db_type, res_row as &dyn Row));
217
            match unique_table {
218
                Some(table) => {
219
                    /* If a table entity it's already present on the collection, we add it
220
                    the founded columns related to the table */
221
                    Self::get_columns_metadata(res_row as &dyn Row, table);
222
                }
223
                None => {
224
                    /* If there's no table for a given "table_name" property on the
225
                    collection yet, we must create a new instance and attach it
226
                    the founded columns data in this iteration */
227
                    let mut new_table = TableMetadata {
228
                        table_name: get_table_name_from_tp_row(res_row),
229
                        columns: Vec::new(),
230
                    };
231
                    Self::get_columns_metadata(res_row as &dyn Row, &mut new_table);
232
                    schema_info.push(new_table);
233
                }
234
            };
235
        }
236

            
237
        schema_info
238
    }
239

            
240
    #[cfg(feature = "mssql")]
241
    fn process_tib_rows(
242
        db_results: Vec<tiberius::Row>,
243
        db_type: DatabaseType,
244
    ) -> Vec<TableMetadata> {
245
        let mut schema_info: Vec<TableMetadata> = Vec::new();
246
        for res_row in db_results.iter() {
247
            let unique_table = schema_info
248
                .iter_mut()
249
                .find(|table| check_for_table_name(table, db_type, res_row as &dyn Row));
250
            match unique_table {
251
                Some(table) => {
252
                    /* If a table entity it's already present on the collection, we add it
253
                    the founded columns related to the table */
254
                    Self::get_columns_metadata(res_row as &dyn Row, table);
255
                }
256
                None => {
257
                    /* If there's no table for a given "table_name" property on the
258
                    collection yet, we must create a new instance and attach it
259
                    the founded columns data in this iteration */
260
                    let mut new_table = TableMetadata {
261
                        table_name: get_table_name_from_tib_row(res_row),
262
                        columns: Vec::new(),
263
                    };
264
                    Self::get_columns_metadata(res_row as &dyn Row, &mut new_table);
265
                    schema_info.push(new_table);
266
                }
267
            };
268
        }
269

            
270
        schema_info
271
    }
272
}
273

            
274
/// Reads the "table_name" column out of a `tokio_postgres` row as an
/// owned [`String`].
#[cfg(feature = "postgres")]
fn get_table_name_from_tp_row(res_row: &tokio_postgres::Row) -> String {
    let table_name: String = res_row.get("table_name");
    table_name
}
278
/// Reads the "table_name" column out of a `tiberius` row, yielding an
/// empty [`String`] when the column is absent or NULL.
#[cfg(feature = "mssql")]
fn get_table_name_from_tib_row(res_row: &tiberius::Row) -> String {
    match res_row.get::<&str, &str>("table_name") {
        Some(name) => name.to_string(),
        None => String::new(),
    }
}
285

            
286
fn check_for_table_name(
287
    table: &&mut TableMetadata,
288
    db_type: DatabaseType,
289
    res_row: &dyn Row,
290
) -> bool {
291
    match db_type {
292
        #[cfg(feature = "postgres")]
293
        DatabaseType::PostgreSql => table.table_name == res_row.get_postgres::<&str>("table_name"),
294
        #[cfg(feature = "mssql")]
295
        DatabaseType::SqlServer => table.table_name == res_row.get_mssql::<&str>("table_name"),
296
        #[cfg(feature = "mysql")]
297
        DatabaseType::MySQL => todo!(),
298
    }
299
}