I am using Adaboost to fit a classification problem. We can do the following:
ens = fitensemble(X, Y, 'AdaBoostM1', 100, 'Tree')
Now 'Tree' is the learner, and we can change it to 'Discriminant' or 'KNN'. Each learner uses a certain Template Object Creation Function; more information is available in the MATLAB documentation on template functions.
Is it possible to create your own function and use it as a learner? And how?
I opened templateTree.m and templateKNN.m to see how MATLAB defines a Template Object Creation Function.
function temp = templateKNN(varargin)
%TEMPLATEKNN Create a classification template for a k-nearest-neighbor learner.
%   TEMP = TEMPLATEKNN(...) forwards all arguments to
%   classreg.learning.FitTemplate.make with the method name 'KNN' and
%   'type' fixed to 'classification'.
% NOTE(review): catchType presumably rejects a caller-supplied 'type'
% argument, since KNN is classification-only here — confirm in FitTemplate.m.
classreg.learning.FitTemplate.catchType(varargin{:});
temp = classreg.learning.FitTemplate.make('KNN','type','classification',varargin{:});
end
and
function temp = templateTree(varargin)
%TEMPLATETREE Create a template for a decision-tree learner.
%   TEMP = TEMPLATETREE(...) forwards all arguments to
%   classreg.learning.FitTemplate.make with the method name 'Tree'.
%   Unlike templateKNN, no 'type' is fixed here, so the resulting
%   template can be filled for either classification or regression.
temp = classreg.learning.FitTemplate.make('Tree',varargin{:});
end
This shows that MATLAB's FitTemplate class has a static method named make; if you open that m-file, you will see:
function temp = make(method,varargin)
%MAKE Build a classreg.learning.FitTemplate for the named learner/ensemble.
%   TEMP = MAKE(METHOD,...) takes a method name (e.g. 'Tree', 'KNN',
%   'AdaBoostM1'), an optional 'type' name-value pair ('classification'
%   or 'regression'), and any learner-specific arguments. METHOD is
%   validated against the lists returned by
%   classreg.learning.classificationModels() and
%   classreg.learning.regressionModels(); an unrecognized name raises
%   the UnknownMethod error — which is why a user-defined learner name
%   cannot be passed through without modifying this validation.
% Check the type of the required argument: METHOD must be a char vector.
if ~ischar(method)
error(message('stats:classreg:learning:FitTemplate:make:BadArgs'));
end
% Extract type (classification or regression) from the optional 'type'
% name-value pair; all remaining arguments are kept for the model itself.
args = {'type'};
defs = { ''};
[usertype,~,modelArgs] = ...
internal.stats.parseArgs(args,defs,varargin{:});
% Check usertype. gettype is a helper defined elsewhere in this class —
% presumably it canonicalizes/validates the type string; confirm in
% FitTemplate.m.
if ~isempty(usertype)
usertype = gettype(usertype);
end
% Method: look it up case-insensitively in the known model name lists.
namesclass = classreg.learning.classificationModels();
namesreg = classreg.learning.regressionModels();
[tfclass,locclass] = ismember(lower(method),lower(namesclass));
[tfreg,locreg] = ismember(lower(method),lower(namesreg));
if ~tfclass && ~tfreg
error(message('stats:classreg:learning:FitTemplate:make:UnknownMethod', method));
end
if tfclass && tfreg
% METHOD is valid for both learning types (true of ensemble methods
% such as 'Bag' or 'Subspace'), so the type must come from the user
% or be inferred below.
method = namesclass{locclass}; % can get it from namesreg too
type = usertype;
% If type is not passed for an ensemble method, try to
% figure it out from learner types. This is useful for
% users who want to type
% fitensemble(X,Y,'Subspace',100,'Discriminant')
% instead of
% fitensemble(X,Y,'Subspace',100,'Discriminant','type','classification')
if isempty(type) && ismember(method,classreg.learning.ensembleModels())
[learners,~,~] = internal.stats.parseArgs({'learners'},{},modelArgs{:});
% Normalize the learners argument to a cell array: a single char
% name or FitTemplate is wrapped; anything else must already be
% a cell array of such values.
if ischar(learners) || isa(learners,'classreg.learning.FitTemplate')
learners = {learners};
elseif ~iscell(learners)
error(message('stats:classreg:learning:FitTemplate:make:BadLearnerTemplates'));
end
L = numel(learners);
% The user can pass several learner templates, and some
% of these learners may be appropriate for
% classification, some for regression, and some for
% both. The ensemble type cannot be determined
% unambiguously unless if all learners are appropriate
% for one type of learning *only*. For example, in 12a
% t1 = ClassificationDiscriminant.template
% t2 = ClassificationKNN.template
% fitensemble(X,Y,'Subspace',10,{t1 t2})
% is going to work because both discriminant and k-NN
% can be used for classification only. If you want to
% mix discriminant and tree, you have to specify the
% ensemble type explicitly:
% t1 = ClassificationDiscriminant.template
% t2 = ClassificationTree.template
% fitensemble(X,Y,'Bag',10,{t1 t2},'type','classification')
types = zeros(L,1); % -1 for regression and 1 for classification
for l=1:L
meth = learners{l};
% A FitTemplate carries its method name; extract it so both
% char names and templates are classified the same way.
if isa(meth,'classreg.learning.FitTemplate')
meth = meth.Method;
end
isc = ismember(lower(meth),lower(namesclass));
isr = ismember(lower(meth),lower(namesreg));
if ~isc && ~isr
error(message('stats:classreg:learning:FitTemplate:make:UnknownMethod', meth));
end
% isc - isr is 1 (classification-only), -1 (regression-only),
% or 0 (valid for both, hence ambiguous).
types(l) = isc - isr;
end
% The type is only inferred when every learner is unambiguous and
% all agree; otherwise type stays empty.
if all(types==1)
type = 'classification';
elseif all(types==-1)
type = 'regression';
end
end
elseif tfclass
method = namesclass{locclass};
type = 'classification';
else
method = namesreg{locreg};
type = 'regression';
end
% Make sure the inferred type is consistent with any user-supplied type.
if ~isempty(usertype) && ~strcmp(usertype,type)
error(message('stats:classreg:learning:FitTemplate:make:UserTypeMismatch', method, usertype));
end
% Make template and fill in type-dependent defaults (fillIfNeeded is
% another method of this class, defined elsewhere in FitTemplate.m).
temp = classreg.learning.FitTemplate(method,modelArgs);
temp = fillIfNeeded(temp,type);
end
To use your own learner, you would have to change this function, because it validates the method name against MATLAB's built-in model lists and rejects anything else.
If you found this helpful, you can donate to us via PayPal or buy us a coffee so we can maintain and grow. Thank you!