@inproceedings{4295659,
  title     = {Learning Fuzzy Linguistic Models from Low Quality Data by Genetic Algorithms},
  booktitle = {2007 {IEEE} International Fuzzy Systems Conference},
  year      = {2007},
  month     = jul,
  pages     = {1--6},
  abstract  = {Incremental rule base learning techniques can be used to learn models and classifiers from interval or fuzzy-valued data. These algorithms are efficient when the observation error is small. This paper is about datasets with medium to high discrepancies between the observed and the actual values of the variables, such as those containing missing values and coarsely discretized data. We will show that the quality of the iterative learning degrades in this kind of problems, and that it does not make full use of all the available information. As an alternative, we propose a new implementation of a multiobjective Michigan-like algorithm, where each individual in the population codifies one rule and the individuals in the Pareto front form the knowledge base.},
  keywords  = {Degradation, Fuzzy sets, Fuzzy systems, genetic algorithms, Global Positioning System, incremental rule base learning techniques, Iterative algorithms, iterative learning degrades, knowledge based systems, learning (artificial intelligence), learning fuzzy linguistic models, Low Quality Data, Noise measurement, Pareto front form, Pareto optimisation, Position measurement, Stochastic resonance, Uncertainty},
  issn      = {1098-7584},
  doi       = {10.1109/FUZZY.2007.4295659},
  author    = {S{\'a}nchez, L. and Otero, J.},
}

@inproceedings{4222979,
  title     = {Modeling Vague Data with Genetic Fuzzy Systems under a Combination of Crisp and Imprecise Criteria},
  booktitle = {2007 {IEEE} Symposium on Computational Intelligence in Multi-Criteria Decision-Making},
  year      = {2007},
  month     = apr,
  pages     = {30--37},
  abstract  = {Multicriteria genetic algorithms can produce fuzzy models with a good balance between their precision and their complexity. The accuracy of a model is usually measured by the mean squared error of its residual. When vague training data is used, the residual becomes a fuzzy number, and it is needed to optimize a combination of crisp and fuzzy objectives in order to learn balanced models. In this paper, we will extend the NSGA-II algorithm to this last case, and test it over a practical problem of causal modeling in marketing. Different setups of this algorithm are compared, and it is shown that the algorithm proposed here is able to improve the generalization properties of those models obtained from the defuzzified training data.},
  keywords  = {Additive noise, combination, Computer science, crisp objectives, defuzzified training data, fuzzy logic, fuzzy models, fuzzy objectives, Fuzzy systems, generalisation (artificial intelligence), generalization, genetic algorithms, Genetic Fuzzy Systems, Global Positioning System, mean squared error, multicriteria genetic algorithms, Noise measurement, NSGA-II algorithm, Position measurement, Probability distribution, Stochastic resonance, Training data, vague data modeling},
  doi       = {10.1109/MCDM.2007.369413},
  author    = {S{\'a}nchez, L. and Couso, I. and Casillas, J.},
}