The Factory pattern is used throughout Hadoop and other open source projects. I have collected some examples I found along the way.
org.apache.hadoop.io.WritableFactories
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.util.ReflectionUtils;
import java.util.HashMap;
/** Factories for non-public writables. Defining a factory permits {@link
 * ObjectWritable} to be able to construct instances of non-public classes. */
@InterfaceAudience.Public
@InterfaceStability.Stable
public class WritableFactories {

  // Registry mapping a Writable class to the factory that builds it.
  // Typed with Class<?> internally (avoids raw-type warnings); the public
  // methods keep the historical raw Class parameter for source compatibility
  // with existing callers.
  private static final HashMap<Class<?>, WritableFactory> CLASS_TO_FACTORY =
    new HashMap<Class<?>, WritableFactory>();

  private WritableFactories() {} // non-instantiable utility class

  /**
   * Define a factory for a class.
   *
   * @param c the class whose instances the factory creates
   * @param factory the factory to register for {@code c}
   */
  public static synchronized void setFactory(Class c, WritableFactory factory) {
    CLASS_TO_FACTORY.put(c, factory);
  }

  /**
   * Return the factory previously defined for a class.
   *
   * @param c the class to look up
   * @return the registered factory, or {@code null} if none was defined
   */
  public static synchronized WritableFactory getFactory(Class c) {
    return CLASS_TO_FACTORY.get(c);
  }

  /**
   * Create a new instance of a class, preferring its registered factory.
   * Falls back to reflective construction via {@link ReflectionUtils} when
   * no factory is registered. If the instance is {@link Configurable}, the
   * supplied configuration is injected.
   *
   * @param c the Writable class to instantiate
   * @param conf configuration passed to {@link Configurable} results and to
   *             the reflective fallback; may be {@code null}
   * @return a new instance of {@code c}
   */
  public static Writable newInstance(Class<? extends Writable> c, Configuration conf) {
    WritableFactory factory = WritableFactories.getFactory(c);
    if (factory != null) {
      Writable result = factory.newInstance();
      if (result instanceof Configurable) {
        ((Configurable) result).setConf(conf);
      }
      return result;
    } else {
      return ReflectionUtils.newInstance(c, conf);
    }
  }

  /** Create a new instance of a class with a defined factory, without a
   * configuration. */
  public static Writable newInstance(Class<? extends Writable> c) {
    return newInstance(c, null);
  }
}
org.apache.hadoop.yarn.factories.impl.pb.RpcServerFactoryPBImpl
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.factories.impl.pb;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.InetSocketAddress;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.ProtobufRpcEngine;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.security.token.SecretManager;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.yarn.YarnException;
import org.apache.hadoop.yarn.factories.RpcServerFactory;
import com.google.protobuf.BlockingService;
/**
 * Factory that builds protobuf-based RPC servers for YARN protocols.
 *
 * <p>Given a protocol interface, it locates by naming convention the
 * generated PB service-impl wrapper class
 * ({@code <protocol-pkg>.impl.pb.service.<Protocol>PBServiceImpl})
 * and the generated proto service class
 * ({@code org.apache.hadoop.yarn.proto.<Protocol>$<Protocol>Service}),
 * instantiates the wrapper around the supplied protocol implementation,
 * wraps it in a {@link BlockingService}, and starts an {@link RPC.Server}
 * speaking {@link ProtobufRpcEngine}.
 *
 * <p>Reflection results are memoized in concurrent caches; the factory is a
 * process-wide singleton obtained via {@link #get()}.
 */
public class RpcServerFactoryPBImpl implements RpcServerFactory {

  private static final Log LOG = LogFactory.getLog(RpcServerFactoryPBImpl.class);

  // Naming-convention fragments used to derive generated class names.
  private static final String PROTO_GEN_PACKAGE_NAME = "org.apache.hadoop.yarn.proto";
  private static final String PROTO_GEN_CLASS_SUFFIX = "Service";
  private static final String PB_IMPL_PACKAGE_SUFFIX = "impl.pb.service";
  private static final String PB_IMPL_CLASS_SUFFIX = "PBServiceImpl";

  private static final RpcServerFactoryPBImpl self = new RpcServerFactoryPBImpl();

  // Used only for class loading; independent of the conf passed by callers.
  private Configuration localConf = new Configuration();

  // protocol -> constructor of its PBServiceImpl wrapper.
  private ConcurrentMap<Class<?>, Constructor<?>> serviceCache =
      new ConcurrentHashMap<Class<?>, Constructor<?>>();
  // protocol -> static newReflectiveBlockingService(...) method.
  private ConcurrentMap<Class<?>, Method> protoCache =
      new ConcurrentHashMap<Class<?>, Method>();

  /** @return the singleton factory instance. */
  public static RpcServerFactoryPBImpl get() {
    return RpcServerFactoryPBImpl.self;
  }

  private RpcServerFactoryPBImpl() {
  }

  /** Convenience overload with no port-range configuration key. */
  public Server getServer(Class<?> protocol, Object instance,
      InetSocketAddress addr, Configuration conf,
      SecretManager<? extends TokenIdentifier> secretManager, int numHandlers)
      throws YarnException {
    return getServer(protocol, instance, addr, conf, secretManager, numHandlers,
        null);
  }

  /**
   * Build and return an RPC server exposing {@code instance} under
   * {@code protocol}.
   *
   * @param protocol the protocol interface to serve
   * @param instance the protocol implementation to delegate to
   * @param addr bind address/port
   * @param conf server configuration
   * @param secretManager token secret manager, may be null
   * @param numHandlers number of handler threads
   * @param portRangeConfig configuration key naming an allowed port range
   * @throws YarnException if the generated classes cannot be located,
   *         instantiated, or invoked, or if server creation fails
   */
  @Override
  public Server getServer(Class<?> protocol, Object instance,
      InetSocketAddress addr, Configuration conf,
      SecretManager<? extends TokenIdentifier> secretManager, int numHandlers,
      String portRangeConfig)
      throws YarnException {
    Constructor<?> constructor = serviceCache.get(protocol);
    if (constructor == null) {
      Class<?> pbServiceImplClazz = null;
      try {
        pbServiceImplClazz = localConf
            .getClassByName(getPbServiceImplClassName(protocol));
      } catch (ClassNotFoundException e) {
        throw new YarnException("Failed to load class: ["
            + getPbServiceImplClassName(protocol) + "]", e);
      }
      try {
        // The PBServiceImpl wrapper takes the protocol implementation as its
        // single constructor argument.
        constructor = pbServiceImplClazz.getConstructor(protocol);
        constructor.setAccessible(true);
        serviceCache.putIfAbsent(protocol, constructor);
      } catch (NoSuchMethodException e) {
        // Fixed message: the lookup above searches for a single-argument
        // constructor taking the protocol type, not (long, addr, conf).
        throw new YarnException("Could not find constructor with params: ["
            + protocol.getName() + "] on class ["
            + pbServiceImplClazz.getName() + "]", e);
      }
    }
    Object service = null;
    try {
      service = constructor.newInstance(instance);
    } catch (InvocationTargetException e) {
      throw new YarnException(e);
    } catch (IllegalAccessException e) {
      throw new YarnException(e);
    } catch (InstantiationException e) {
      throw new YarnException(e);
    }
    // By convention the wrapper's first interface is the generated PB
    // BlockingInterface for this protocol.
    Class<?> pbProtocol = service.getClass().getInterfaces()[0];
    Method method = protoCache.get(protocol);
    if (method == null) {
      Class<?> protoClazz = null;
      try {
        protoClazz = localConf.getClassByName(getProtoClassName(protocol));
      } catch (ClassNotFoundException e) {
        throw new YarnException("Failed to load class: ["
            + getProtoClassName(protocol) + "]", e);
      }
      try {
        // Static factory generated by protoc that adapts a BlockingInterface
        // into a BlockingService.
        method = protoClazz.getMethod("newReflectiveBlockingService",
            pbProtocol.getInterfaces()[0]);
        method.setAccessible(true);
        protoCache.putIfAbsent(protocol, method);
      } catch (NoSuchMethodException e) {
        throw new YarnException(e);
      }
    }
    try {
      return createServer(pbProtocol, addr, conf, secretManager, numHandlers,
          (BlockingService)method.invoke(null, service), portRangeConfig);
    } catch (InvocationTargetException e) {
      throw new YarnException(e);
    } catch (IllegalAccessException e) {
      throw new YarnException(e);
    } catch (IOException e) {
      throw new YarnException(e);
    }
  }

  /** Derive the generated proto outer$service class name for a protocol. */
  private String getProtoClassName(Class<?> clazz) {
    String srcClassName = getClassName(clazz);
    return PROTO_GEN_PACKAGE_NAME + "." + srcClassName + "$" + srcClassName
        + PROTO_GEN_CLASS_SUFFIX;
  }

  /** Derive the PBServiceImpl wrapper class name for a protocol. */
  private String getPbServiceImplClassName(Class<?> clazz) {
    String srcPackagePart = getPackageName(clazz);
    String srcClassName = getClassName(clazz);
    String destPackagePart = srcPackagePart + "." + PB_IMPL_PACKAGE_SUFFIX;
    String destClassPart = srcClassName + PB_IMPL_CLASS_SUFFIX;
    return destPackagePart + "." + destClassPart;
  }

  /** @return the simple (package-less) name of {@code clazz}. */
  private String getClassName(Class<?> clazz) {
    String fqName = clazz.getName();
    return (fqName.substring(fqName.lastIndexOf(".") + 1, fqName.length()));
  }

  /** @return the package name of {@code clazz}. */
  private String getPackageName(Class<?> clazz) {
    return clazz.getPackage().getName();
  }

  /** Configure the protobuf engine, start the RPC server, and register the
   * blocking service under the protocol-buffer RPC kind. */
  private Server createServer(Class<?> pbProtocol, InetSocketAddress addr, Configuration conf,
      SecretManager<? extends TokenIdentifier> secretManager, int numHandlers,
      BlockingService blockingService, String portRangeConfig) throws IOException {
    RPC.setProtocolEngine(conf, pbProtocol, ProtobufRpcEngine.class);
    RPC.Server server = RPC.getServer(pbProtocol, blockingService,
        addr.getHostName(), addr.getPort(), numHandlers, false, conf,
        secretManager, portRangeConfig);
    LOG.info("Adding protocol "+pbProtocol.getCanonicalName()+" to the server");
    server.addProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, pbProtocol, blockingService);
    return server;
  }
}
org.apache.flume.source.DefaultSourceFactory
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.flume.source;
import java.util.Locale;

import org.apache.flume.FlumeException;
import org.apache.flume.Source;
import org.apache.flume.SourceFactory;
import org.apache.flume.conf.source.SourceType;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.google.common.base.Preconditions;
/**
 * Default {@link SourceFactory}: resolves a source "type" string either to a
 * well-known {@link SourceType} alias or to a fully-qualified class name, and
 * instantiates it reflectively.
 */
public class DefaultSourceFactory implements SourceFactory {

  private static final Logger logger = LoggerFactory
      .getLogger(DefaultSourceFactory.class);

  /**
   * Create and name a new source instance of the given type.
   *
   * @param name the component name to assign to the new source
   * @param type a {@link SourceType} alias or fully-qualified class name
   * @return the newly constructed, named source
   * @throws FlumeException if the class cannot be resolved or instantiated
   */
  @Override
  public Source create(String name, String type) throws FlumeException {
    Preconditions.checkNotNull(name, "name");
    Preconditions.checkNotNull(type, "type");
    logger.info("Creating instance of source {}, type {}", name, type);
    Class<? extends Source> sourceClass = getClass(type);
    try {
      Source source = sourceClass.newInstance();
      source.setName(name);
      return source;
    } catch (Exception ex) {
      throw new FlumeException("Unable to create source: " + name
          +", type: " + type + ", class: " + sourceClass.getName(), ex);
    }
  }

  /**
   * Resolve a type string to a concrete {@link Source} class. Known
   * {@link SourceType} aliases are mapped to their configured class name;
   * anything else is treated as a fully-qualified class name.
   *
   * @param type alias or fully-qualified class name
   * @throws FlumeException if the resolved class cannot be loaded
   */
  @SuppressWarnings("unchecked")
  @Override
  public Class<? extends Source> getClass(String type)
      throws FlumeException {
    String sourceClassName = type;
    SourceType srcType = SourceType.OTHER;
    try {
      // Use Locale.ENGLISH: the default-locale toUpperCase() is broken under
      // e.g. the Turkish locale, where 'i' upper-cases to a dotted 'İ' and a
      // valid built-in alias would fail to match its enum constant.
      srcType = SourceType.valueOf(type.toUpperCase(Locale.ENGLISH));
    } catch (IllegalArgumentException ex) {
      // Not a built-in alias; fall through and treat it as a class name.
      logger.debug("Source type {} is a custom type", type);
    }
    if (!srcType.equals(SourceType.OTHER)) {
      sourceClassName = srcType.getSourceClassName();
    }
    try {
      return (Class<? extends Source>) Class.forName(sourceClassName);
    } catch (Exception ex) {
      throw new FlumeException("Unable to load source type: " + type
          + ", class: " + sourceClassName, ex);
    }
  }
}