Real-world examples and use cases demonstrating KerasFactory layers in action. These examples show how to build production-ready tabular models for various domains and applications.
from kerasfactory.layers import (
    DateParsingLayer,
    DateEncodingLayer,
    SeasonLayer,
)


def create_temporal_pipeline() -> tuple:
    """Build the three layers of a temporal feature pipeline.

    Note: the original annotated this as ``-> keras.Model``, but the
    function returns unconnected layer instances, not a model — the
    annotation is corrected to ``tuple``.

    Returns:
        tuple: ``(date_parser, date_encoder, season_layer)`` — layer
        instances for the caller to wire into a model.
    """
    # Date parsing
    date_parser = DateParsingLayer()
    # Date encoding; min_year/max_year presumably bound the encoded year
    # range — confirm against DateEncodingLayer's docs
    date_encoder = DateEncodingLayer(min_year=1900, max_year=2100)
    # Season extraction
    season_layer = SeasonLayer()
    return date_parser, date_encoder, season_layer


# Usage
date_parser, date_encoder, season_layer = create_temporal_pipeline()
🎯 Domain-Specific Examples
1. Financial Modeling
def create_financial_model(input_dim: int, num_classes: int) -> keras.Model:
    """Build a softmax classifier for financial risk assessment.

    Args:
        input_dim: Number of input features per sample.
        num_classes: Number of risk classes to predict.

    Returns:
        keras.Model: Compiled-ready functional model.
    """
    inputs = keras.Input(shape=(input_dim,))

    # Learnable preprocessing of raw financial features
    features = DifferentiableTabularPreprocessor()(inputs)
    # Select the most relevant risk factors
    features = VariableSelection(hidden_dim=64)(features)
    # Attend over feature interactions
    features = TabularAttention(num_heads=8, key_dim=64)(features)

    # Domain constraints — thresholds on credit score and debt ratio
    risk_rules = [
        {'feature': 'credit_score', 'operator': '>', 'value': 600, 'weight': 1.0},
        {'feature': 'debt_ratio', 'operator': '<', 'value': 0.4, 'weight': 0.8},
    ]
    features = BusinessRulesLayer(rules=risk_rules, feature_type='numerical')(features)

    outputs = keras.layers.Dense(num_classes, activation='softmax')(features)
    return keras.Model(inputs, outputs)
2. Healthcare Analytics
def create_healthcare_model(input_dim: int, num_classes: int) -> keras.Model:
    """Build a classifier for healthcare outcome prediction.

    The model has two outputs: class probabilities and an anomaly
    signal from the anomaly-detection layer.

    Args:
        input_dim: Number of input features per sample.
        num_classes: Number of outcome classes.

    Returns:
        keras.Model: Model mapping inputs to [outputs, anomalies].
    """
    inputs = keras.Input(shape=(input_dim,))

    # Learnable preprocessing
    hidden = DifferentiableTabularPreprocessor()(inputs)
    # Embed numerical medical features
    hidden = AdvancedNumericalEmbedding(embedding_dim=64)(hidden)
    # Distribution-aware encoding (e.g. for skewed lab values)
    hidden = DistributionAwareEncoder(encoding_dim=64)(hidden)
    # Attention over feature relationships
    hidden = TabularAttention(num_heads=8, key_dim=64)(hidden)
    # Anomaly layer returns both transformed features and anomaly scores
    hidden, anomalies = NumericalAnomalyDetection()(hidden)

    outputs = keras.layers.Dense(num_classes, activation='softmax')(hidden)
    return keras.Model(inputs, [outputs, anomalies])
3. E-commerce Recommendation
def create_recommendation_model(input_dim: int, num_classes: int) -> keras.Model:
    """Build a classifier for e-commerce product recommendation.

    Args:
        input_dim: Number of input features per sample.
        num_classes: Number of recommendation classes.

    Returns:
        keras.Model: Functional model from inputs to softmax scores.
    """
    inputs = keras.Input(shape=(input_dim,))

    # Learnable preprocessing of raw features
    net = DifferentiableTabularPreprocessor()(inputs)
    # Select features capturing user preferences
    net = VariableSelection(hidden_dim=64)(net)
    # Separate attention heads for numerical vs categorical features
    net = MultiResolutionTabularAttention(
        num_heads=8,
        numerical_heads=4,
        categorical_heads=4,
    )(net)
    # Gated fusion of the attended representations
    net = GatedFeatureFusion(hidden_dim=128)(net)

    outputs = keras.layers.Dense(num_classes, activation='softmax')(net)
    return keras.Model(inputs, outputs)
🚀 Performance Examples
1. Memory-Efficient Model
def create_memory_efficient_model(input_dim: int, num_classes: int) -> keras.Model:
    """Build a compact model intended for large datasets.

    Uses reduced hidden dimensions and fewer attention heads to keep
    the parameter count and activation memory small.

    Args:
        input_dim: Number of input features per sample.
        num_classes: Number of output classes.

    Returns:
        keras.Model: Functional classification model.
    """
    inputs = keras.Input(shape=(input_dim,))

    # Small hidden sizes throughout to limit memory use
    compact = VariableSelection(hidden_dim=32)(inputs)
    compact = TabularAttention(num_heads=4, key_dim=32)(compact)
    compact = GatedFeatureFusion(hidden_dim=64)(compact)

    outputs = keras.layers.Dense(num_classes, activation='softmax')(compact)
    return keras.Model(inputs, outputs)
2. Speed-Optimized Model
def create_speed_optimized_model(input_dim: int, num_classes: int) -> keras.Model:
    """Build a minimal model for real-time inference.

    Keeps only two lightweight layers before the classifier head to
    minimize per-request latency.

    Args:
        input_dim: Number of input features per sample.
        num_classes: Number of output classes.

    Returns:
        keras.Model: Functional classification model.
    """
    inputs = keras.Input(shape=(input_dim,))

    # Two small layers only — latency over capacity
    fast_path = VariableSelection(hidden_dim=32)(inputs)
    fast_path = TabularAttention(num_heads=4, key_dim=32)(fast_path)

    outputs = keras.layers.Dense(num_classes, activation='softmax')(fast_path)
    return keras.Model(inputs, outputs)
🔍 Analysis and Interpretation
1. Model Interpretation
def interpret_model(model, X_test, layer_name='tabular_attention'):
    """Extract and summarize a model's attention-layer activations.

    Builds a sub-model that stops at ``layer_name``, runs it on
    ``X_test``, and prints the per-sample mean of the activations.

    Args:
        model: Trained keras.Model containing the named layer.
        X_test: Input batch to run through the sub-model.
        layer_name: Name of the attention layer to probe.

    Returns:
        The raw activations of ``layer_name`` for ``X_test``.
    """
    # Sub-model sharing weights with `model`, truncated at the probe layer
    probe = keras.Model(
        inputs=model.input,
        outputs=model.get_layer(layer_name).output,
    )
    activations = probe.predict(X_test)

    # Average over the batch axis for a quick summary
    batch_mean = activations.mean(axis=0)
    print("Mean attention weights:", batch_mean)

    return activations
2. Feature Importance Analysis
def analyze_feature_importance(model, X_test, feature_names):
    """Rank features by their mean attention weight.

    Args:
        model: Trained model whose attention layer will be probed
            via ``interpret_model``.
        X_test: Input batch used to compute attention weights.
        feature_names: Labels for the features, in input order.

    Returns:
        pd.DataFrame: Columns ``feature`` and ``importance``, sorted
        by importance in descending order.
    """
    # Raw attention weights from the model's attention layer
    weights = interpret_model(model, X_test)

    # Collapse batch and head/position axes to one score per feature
    scores = np.mean(weights, axis=(0, 1))

    ranking = pd.DataFrame({
        'feature': feature_names,
        'importance': scores,
    })
    ranking = ranking.sort_values('importance', ascending=False)
    return ranking