Learn how to build sophisticated tabular models using KerasFactory layers. This tutorial covers advanced architectures, design patterns, and optimization techniques.
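All of the examples below assume that `keras` and NumPy are imported and that a small tabular dataset has already been prepared. A minimal setup sketch (the synthetic data, shapes, and split sizes are illustrative assumptions, not part of KerasFactory):

```python
import keras
import numpy as np

# Illustrative synthetic dataset: 20 numeric features, 3 classes.
X = np.random.rand(1000, 20).astype("float32")
y = keras.utils.to_categorical(np.random.randint(0, 3, size=1000), num_classes=3)

# Simple train/validation/test split used by the training examples below.
X_train, y_train = X[:700], y[:700]
X_val, y_val = X[700:850], y[700:850]
X_test, y_test = X[850:], y[850:]
```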
A residual model can route a skip connection from the raw inputs around a stack of gated residual blocks (here the inputs are projected to the block width so the addition is shape-compatible):

```python
from kerasfactory.layers import DifferentiableTabularPreprocessor, GatedResidualNetwork

def create_residual_model(input_dim, num_classes):
    """Create a residual model with skip connections."""
    inputs = keras.Input(shape=(input_dim,))

    # Initial processing
    x = DifferentiableTabularPreprocessor()(inputs)

    # Residual blocks
    x = GatedResidualNetwork(units=64, dropout_rate=0.1)(x)
    x = GatedResidualNetwork(units=64, dropout_rate=0.1)(x)
    x = GatedResidualNetwork(units=64, dropout_rate=0.1)(x)

    # Skip connection (project the raw inputs to the block width so shapes match)
    skip = keras.layers.Dense(64)(inputs)
    x = keras.layers.Add()([skip, x])

    # Output
    outputs = keras.layers.Dense(num_classes, activation='softmax')(x)
    return keras.Model(inputs, outputs)

# Usage
residual_model = create_residual_model(input_dim=20, num_classes=3)
```
Column and row attention can be combined to attend over features and samples before fusing the result:

```python
from kerasfactory.layers import ColumnAttention, RowAttention, GatedFeatureFusion

def create_column_row_attention_model(input_dim, num_classes):
    """Create a model with column and row attention."""
    inputs = keras.Input(shape=(input_dim,))

    # Column attention (feature-level)
    x = ColumnAttention(hidden_dim=64, dropout=0.1)(inputs)

    # Row attention (sample-level)
    x = RowAttention(hidden_dim=64, dropout=0.1)(x)

    # Feature fusion
    x = GatedFeatureFusion(hidden_dim=128)(x)

    # Output
    outputs = keras.layers.Dense(num_classes, activation='softmax')(x)
    return keras.Model(inputs, outputs)

# Usage
column_row_model = create_column_row_attention_model(input_dim=20, num_classes=3)
```
🔄 Residual and Gated Networks
1. Gated Residual Network
```python
from kerasfactory.layers import GatedResidualNetwork, GatedLinearUnit

def create_gated_residual_model(input_dim, num_classes):
    """Create a gated residual network model."""
    inputs = keras.Input(shape=(input_dim,))

    # Gated residual blocks
    x = GatedResidualNetwork(units=64, dropout_rate=0.1)(inputs)
    x = GatedResidualNetwork(units=64, dropout_rate=0.1)(x)
    x = GatedResidualNetwork(units=64, dropout_rate=0.1)(x)

    # Gated linear unit
    x = GatedLinearUnit(units=64)(x)

    # Output
    outputs = keras.layers.Dense(num_classes, activation='softmax')(x)
    return keras.Model(inputs, outputs)

# Usage
gated_residual_model = create_gated_residual_model(input_dim=20, num_classes=3)
```
Transformer blocks can be stacked in the same functional style:

```python
from kerasfactory.layers import TransformerBlock

def create_transformer_model(input_dim, num_classes):
    """Create a transformer-based model."""
    inputs = keras.Input(shape=(input_dim,))

    # Transformer blocks
    x = TransformerBlock(dim_model=64, num_heads=4, ff_units=128, dropout_rate=0.1)(inputs)
    x = TransformerBlock(dim_model=64, num_heads=4, ff_units=128, dropout_rate=0.1)(x)

    # Output
    outputs = keras.layers.Dense(num_classes, activation='softmax')(x)
    return keras.Model(inputs, outputs)

# Usage
transformer_model = create_transformer_model(input_dim=20, num_classes=3)
```
🎯 Ensemble Methods
1. Mixture of Experts
```python
from kerasfactory.layers import TabularMoELayer, GatedFeatureFusion

def create_moe_model(input_dim, num_classes):
    """Create a mixture of experts model."""
    inputs = keras.Input(shape=(input_dim,))

    # Mixture of experts
    x = TabularMoELayer(num_experts=4, expert_units=16)(inputs)

    # Additional processing
    x = GatedFeatureFusion(hidden_dim=128)(x)

    # Output
    outputs = keras.layers.Dense(num_classes, activation='softmax')(x)
    return keras.Model(inputs, outputs)

# Usage
moe_model = create_moe_model(input_dim=20, num_classes=3)
```
2. Boosting Ensemble
```python
from kerasfactory.layers import BoostingEnsembleLayer

def create_boosting_model(input_dim, num_classes):
    """Create a boosting ensemble model."""
    inputs = keras.Input(shape=(input_dim,))

    # Boosting ensemble
    x = BoostingEnsembleLayer(num_learners=3, learner_units=64, hidden_activation='relu')(inputs)

    # Output
    outputs = keras.layers.Dense(num_classes, activation='softmax')(x)
    return keras.Model(inputs, outputs)

# Usage
boosting_model = create_boosting_model(input_dim=20, num_classes=3)
```
Anomaly detection layers can be attached as auxiliary outputs alongside the main prediction head:

```python
from kerasfactory.layers import (
    NumericalAnomalyDetection,
    CategoricalAnomalyDetectionLayer,
    VariableSelection,
    TabularAttention,
    GatedFeatureFusion,
)

def create_anomaly_detection_model(input_dim, num_classes):
    """Create a model with anomaly detection."""
    inputs = keras.Input(shape=(input_dim,))

    # Anomaly detection heads (auxiliary outputs)
    numerical_anomalies = NumericalAnomalyDetection()(inputs)
    categorical_anomalies = CategoricalAnomalyDetectionLayer()(inputs)

    # Main processing
    x = VariableSelection(hidden_dim=64)(inputs)
    x = TabularAttention(num_heads=8)(x)
    x = GatedFeatureFusion(hidden_dim=128)(x)

    # Output: class predictions plus both anomaly scores
    outputs = keras.layers.Dense(num_classes, activation='softmax')(x)
    return keras.Model(inputs, [outputs, numerical_anomalies, categorical_anomalies])

# Usage
anomaly_model = create_anomaly_detection_model(input_dim=20, num_classes=3)
```
Domain knowledge can be injected directly into the model through a business rules layer:

```python
from kerasfactory.layers import BusinessRulesLayer, VariableSelection, TabularAttention

def create_business_rules_model(input_dim, num_classes, rules):
    """Create a model with business rules integration."""
    inputs = keras.Input(shape=(input_dim,))

    # Business rules layer
    x = BusinessRulesLayer(rules=rules, feature_type='numerical', trainable_weights=True)(inputs)

    # Additional processing
    x = VariableSelection(hidden_dim=64)(x)
    x = TabularAttention(num_heads=8)(x)

    # Output
    outputs = keras.layers.Dense(num_classes, activation='softmax')(x)
    return keras.Model(inputs, outputs)

# Usage
rules = [
    {'feature': 'age', 'operator': '>', 'value': 18, 'weight': 1.0},
    {'feature': 'income', 'operator': '>', 'value': 50000, 'weight': 0.8},
]
business_model = create_business_rules_model(input_dim=20, num_classes=3, rules=rules)
```
⚡ Performance Optimization
1. Memory-Efficient Model
```python
def create_memory_efficient_model(input_dim, num_classes):
    """Create a memory-efficient model."""
    inputs = keras.Input(shape=(input_dim,))

    # Use smaller dimensions
    x = VariableSelection(hidden_dim=32)(inputs)
    x = TabularAttention(num_heads=4, key_dim=32)(x)
    x = GatedFeatureFusion(hidden_dim=64)(x)

    # Output
    outputs = keras.layers.Dense(num_classes, activation='softmax')(x)
    return keras.Model(inputs, outputs)

# Usage
memory_efficient_model = create_memory_efficient_model(input_dim=20, num_classes=3)
```
2. Speed-Optimized Model
```python
def create_speed_optimized_model(input_dim, num_classes):
    """Create a speed-optimized model."""
    inputs = keras.Input(shape=(input_dim,))

    # Use fewer layers and smaller dimensions
    x = VariableSelection(hidden_dim=32)(inputs)
    x = TabularAttention(num_heads=4, key_dim=32)(x)

    # Output
    outputs = keras.layers.Dense(num_classes, activation='softmax')(x)
    return keras.Model(inputs, outputs)

# Usage
speed_optimized_model = create_speed_optimized_model(input_dim=20, num_classes=3)
```
3. Mixed Precision Training
```python
# Enable mixed precision (set the policy before building any layers)
keras.mixed_precision.set_global_policy('mixed_float16')

def create_mixed_precision_model(input_dim, num_classes):
    """Create a mixed precision model."""
    inputs = keras.Input(shape=(input_dim,))

    # These layers run in float16 under the global policy
    x = VariableSelection(hidden_dim=64)(inputs)
    x = TabularAttention(num_heads=8, key_dim=64)(x)
    x = GatedFeatureFusion(hidden_dim=128)(x)

    # Output (use float32 for the final layer for numerical stability)
    outputs = keras.layers.Dense(num_classes, activation='softmax', dtype='float32')(x)
    return keras.Model(inputs, outputs)

# Usage
mixed_precision_model = create_mixed_precision_model(input_dim=20, num_classes=3)
```
🔧 Model Compilation and Training
1. Advanced Compilation
```python
def compile_model(model, learning_rate=0.001):
    """Compile model with advanced settings."""
    # Learning rate scheduling
    lr_schedule = keras.optimizers.schedules.ExponentialDecay(
        initial_learning_rate=learning_rate,
        decay_steps=1000,
        decay_rate=0.9,
    )

    # Compile with gradient clipping and the scheduled learning rate
    model.compile(
        optimizer=keras.optimizers.Adam(learning_rate=lr_schedule, clipnorm=1.0),
        loss='categorical_crossentropy',
        metrics=['accuracy'],
    )
    return model

# Usage (any of the model builders above works here)
model = create_gated_residual_model(input_dim=20, num_classes=3)
model = compile_model(model, learning_rate=0.001)
```
2. Advanced Training

```python
def train_model(model, X_train, y_train, X_val, y_val):
    """Train model with advanced callbacks."""
    # Callbacks
    callbacks = [
        keras.callbacks.EarlyStopping(
            monitor='val_loss', patience=10, restore_best_weights=True
        ),
        keras.callbacks.ReduceLROnPlateau(
            monitor='val_loss', factor=0.5, patience=5, min_lr=1e-7
        ),
        keras.callbacks.ModelCheckpoint(
            'best_model.keras', monitor='val_loss', save_best_only=True
        ),
    ]

    # Train
    history = model.fit(
        X_train, y_train,
        validation_data=(X_val, y_val),
        epochs=100,
        batch_size=32,
        callbacks=callbacks,
        verbose=1,
    )
    return history

# Usage
history = train_model(model, X_train, y_train, X_val, y_val)
```
After training, attention weights can be extracted to inspect which features the model focuses on:

```python
import numpy as np

def interpret_model(model, X_test, layer_name='tabular_attention'):
    """Interpret model using attention weights."""
    # Build a sub-model that exposes the attention layer's output
    # (assumes the model contains a TabularAttention layer with this name)
    attention_model = keras.Model(
        inputs=model.input,
        outputs=model.get_layer(layer_name).output,
    )
    attention_weights = attention_model.predict(X_test)

    # Analyze attention patterns
    mean_attention = np.mean(attention_weights, axis=0)
    print("Mean attention weights:", mean_attention)
    return attention_weights

# Usage
attention_weights = interpret_model(model, X_test)
```
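Putting the pieces together, a minimal end-to-end sketch using the builders and helpers defined above (the synthetic dataset from the setup at the top of this page is assumed):

```python
# Build, compile, and train one of the models defined above.
model = create_gated_residual_model(input_dim=20, num_classes=3)
model = compile_model(model, learning_rate=0.001)
history = train_model(model, X_train, y_train, X_val, y_val)

# Evaluate on held-out data.
test_loss, test_acc = model.evaluate(X_test, y_test, verbose=0)
print(f"Test accuracy: {test_acc:.3f}")
```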
📚 Next Steps
Examples: See real-world model building applications
API Reference: Deep dive into layer parameters
Performance: Optimize your models for production
Advanced Topics: Explore cutting-edge techniques
Ready to see real examples? Check out the Examples section!