/*
 * Workbench.java
 */
package org.ngbw.sdk;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.lang.reflect.InvocationTargetException;
import java.sql.SQLException;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.ngbw.sdk.api.conversion.ConversionService;
import org.ngbw.sdk.api.conversion.RecordFilter;
import org.ngbw.sdk.api.core.CoreRegistry;
import org.ngbw.sdk.api.core.GenericDataRecordCollection;
import org.ngbw.sdk.api.core.SourceDocumentTransformer;
import org.ngbw.sdk.api.data.SimpleSearchMetaQuery;
import org.ngbw.sdk.common.util.Resource;
import org.ngbw.sdk.common.util.ResourceNotFoundException;
import org.ngbw.sdk.common.util.StringUtils;
import org.ngbw.sdk.common.util.ValidationResult;
import org.ngbw.sdk.core.configuration.Configuration;
import org.ngbw.sdk.core.configuration.ServiceFactory;
import org.ngbw.sdk.core.shared.IndexedDataRecord;
import org.ngbw.sdk.core.shared.SourceDocumentBean;
import org.ngbw.sdk.core.shared.SourceDocumentType;
import org.ngbw.sdk.core.types.DataFormat;
import org.ngbw.sdk.core.types.DataType;
import org.ngbw.sdk.core.types.Dataset;
import org.ngbw.sdk.core.types.EntityType;
import org.ngbw.sdk.core.types.RecordFieldType;
import org.ngbw.sdk.core.types.RecordType;
import org.ngbw.sdk.database.ConnectionManager;
import org.ngbw.sdk.database.ConnectionSource;
import org.ngbw.sdk.database.Folder;
import org.ngbw.sdk.database.Group;
import org.ngbw.sdk.database.SourceDocument;
import org.ngbw.sdk.database.Task;
import org.ngbw.sdk.database.User;
import org.ngbw.sdk.database.UserDataItem;

/**
 * This class is so far the main entry point into the NGBW SDK.
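 * <p>
 * A minimal bootstrap sketch (it assumes the default configuration and a
 * workbench.properties file are available on the classpath; names are
 * illustrative only):
 * <pre>{@code
 * // The first call builds the singleton from the default configuration.
 * Workbench workbench = Workbench.getInstance();
 * // Follow-up calls reuse the same instance.
 * Workbench sameInstance = Workbench.getInstance();
 * }</pre>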
 *
 * The Workbench spawns a WorkbenchSession for User-bound interaction. The
 * Workbench keeps track of all spawned WorkbenchSessions and ensures that each
 * user interacts with only one WorkbenchSession. The Workbench is also
 * responsible for suspending WorkbenchSessions to disk and resuming suspended
 * WorkbenchSessions.
 *
 * @author Roland H. Niedner
 * @author Paul Hoover
 *
 */
public class Workbench {

    private static Workbench SINGLETON;
    private static final Log log = LogFactory.getLog(Workbench.class);
    private final ServiceFactory serviceFactory;
    private final ConcurrentHashMap<String, WorkbenchSession> activeSessions =
            new ConcurrentHashMap<String, WorkbenchSession>();
    private Properties properties;

    /**
     * @return Workbench
     */
    public static synchronized Workbench getInstance() {
        if (SINGLETON == null)
            SINGLETON = new Workbench();
        return SINGLETON;
    }

    /**
     * @param cfg
     * @return Workbench
     */
    public static synchronized Workbench getInstance(Resource cfg) {
        if (SINGLETON != null)
            throw new WorkbenchException(
                    "A workbench instance already exists. Use getInstance() for follow-up calls!");
        SINGLETON = new Workbench(cfg);
        return SINGLETON;
    }

    /**
     * @param serviceFactory
     * @return Workbench
     */
    public static synchronized Workbench getInstance(ServiceFactory serviceFactory) {
        if (SINGLETON != null)
            throw new WorkbenchException(
                    "A workbench instance already exists. Use getInstance() for follow-up calls!");
        SINGLETON = new Workbench(serviceFactory);
        return SINGLETON;
    }

    protected Workbench() {
        this(new Configuration().configure().buildServiceFactory());
    }

    protected Workbench(Resource cfg) {
        this(new Configuration().configure(cfg).buildServiceFactory());
    }

    protected Workbench(ServiceFactory factory) {
        try {
            Resource pr = Resource.getResource("workbench.properties");
            properties = pr.getProperties();
        } catch (ResourceNotFoundException e) {
            properties = new Properties();
        }
        serviceFactory = factory;
        try {
            ConnectionManager.setConnectionSource();
        } catch (Exception err) {
            throw new WorkbenchException(err);
        }
    }

    /**
     * Get properties loaded from workbench.properties on the classpath. Should
     * be treated as read-only.
     */
    public Properties getProperties() {
        return this.properties;
    }

    @Override
    protected void finalize() {
        try {
            ConnectionSource connSource = ConnectionManager.getConnectionSource();
            if (connSource != null)
                connSource.close();
        } catch (SQLException sqlErr) {
            log.error("Caught an exception during finalization", sqlErr);
        }
    }

    /* Workbench Basic Methods */

    /**
     * Method returns the ServiceFactory which provides access to all module
     * controllers of the workbench.
     *
     * @return serviceFactory
     */
    public ServiceFactory getServiceFactory() {
        return serviceFactory;
    }

    /* Session Management */

    /**
     * Method returns an exclusive session for the user if the authentication
     * succeeds.
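     * <p>
     * A minimal sketch (the credentials are placeholders; the plain-text
     * password is hashed before it is compared to the persisted hash):
     * <pre>{@code
     * Workbench workbench = Workbench.getInstance();
     * WorkbenchSession session = workbench.getSession("someUser", "somePassword");
     * // The user is now listed in the active session map.
     * boolean active = workbench.hasActiveSession("someUser");
     * }</pre>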
     *
     * @param username
     * @param password
     * @return session
     * @throws UserAuthenticationException
     * @throws SQLException
     * @throws IOException
     */
    public WorkbenchSession getSession(String username, String password)
            throws UserAuthenticationException, IOException, SQLException {
        if (hasActiveSession(username))
            throw new RuntimeException("User " + username + " already has an active session");
        User user = User.findUserByUsername(username);
        if (user == null)
            throw new UserAuthenticationException("User does not exist!");
        String hash = StringUtils.getMD5HexString(password);
        if (!user.getPassword().equals(hash))
            throw new UserAuthenticationException("Passwords don't match!");
        WorkbenchSession session = new WorkbenchSession(user, this);
        activeSessions.put(username, session);
        return session;
    }

    /**
     * Returns the active WorkbenchSession object for the indicated user. A
     * WorkbenchSession object is only returned if the user actually has an
     * active session, and if the given password matches the user's persisted
     * password.
     *
     * @param username the name of the user
     * @param encryptedPassword the MD5 hash value for the user's password
     * @return a WorkbenchSession object if the user has an active session, null otherwise
     * @throws IOException
     * @throws SQLException
     * @throws UserAuthenticationException
     */
    public WorkbenchSession getActiveSession(String username, String encryptedPassword)
            throws IOException, SQLException, UserAuthenticationException {
        WorkbenchSession session = activeSessions.get(username);
        if (session == null)
            return null;
        if (!session.getUser().getPassword().equals(encryptedPassword))
            throw new UserAuthenticationException("Passwords don't match!");
        return session;
    }

    /**
     * A temporary measure to enable interaction with the Sirius applet.
     *
     * @param username
     * @param encryptedPassword
     * @return
     * @throws IOException
     * @throws SQLException
     * @throws UserAuthenticationException
     */
    public WorkbenchSession getSessionForApplet(String username, String encryptedPassword)
            throws IOException, SQLException, UserAuthenticationException {
        WorkbenchSession session = getActiveSession(username, encryptedPassword);
        if (session != null)
            return session;
        User user = User.findUserByUsername(username);
        if (user == null)
            throw new UserAuthenticationException("User does not exist!");
        if (!user.getPassword().equals(encryptedPassword))
            throw new UserAuthenticationException("Passwords don't match!");
        session = new WorkbenchSession(user, this);
        activeSessions.put(username, session);
        return session;
    }

    /**
     * Suspends the WorkbenchSession for the user with the submitted username by
     * removing it from the active session map.
     *
     * @param username
     */
    public void suspendSession(String username) {
        activeSessions.remove(username);
    }

    /**
     * Method checks whether the user with the submitted username already has an
     * active WorkbenchSession.
     *
     * @param username
     * @return hasActiveSession
     */
    public boolean hasActiveSession(String username) {
        return getActiveUsers().contains(username);
    }

    /**
     * Method returns a set of all usernames of Users with an active
     * WorkbenchSession.
     *
     * @return activeUsers
     */
    public Set<String> getActiveUsers() {
        return activeSessions.keySet();
    }

    /* MetaController Methods */

    /**
     * Method returns whether there is a SourceDocumentTransformer registered
     * for the submitted SourceDocumentType.
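     * <p>
     * A sketch of the intended check-then-transform pattern (the document and
     * the target RecordType are placeholders, and the Workbench instance is
     * assumed to be at hand):
     * <pre>{@code
     * SourceDocument document = ...;   // e.g. a UserDataItem's source document
     * RecordType target = ...;         // the desired target RecordType
     * if (workbench.hasTransformer(document.getType(), target)) {
     *     SourceDocumentTransformer transformer = workbench.getTransformer(document, target);
     *     // ... use the transformer ...
     * }
     * }</pre>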
     *
     * @param type
     * @param targetType
     * @return hasTransformer
     */
    public boolean hasTransformer(SourceDocumentType type, RecordType targetType) {
        return serviceFactory.getCoreRegistry().hasTransformerClass(type, targetType);
    }

    /**
     * Method returns the set of RecordTypes that a SourceDocument of the
     * submitted type can be transformed into.
     *
     * @param type
     * @return targetTypes
     */
    public Set<RecordType> getTransformationTargetRecordTypes(SourceDocumentType type) {
        return serviceFactory.getCoreRegistry().getTransformationTargetRecordTypes(type);
    }

    /**
     * Return the SourceDocumentTransformer for the submitted SourceDocument.
     *
     * @param sourceDocument
     * @param targetType
     * @return transformer
     * @throws InstantiationException
     * @throws IllegalAccessException
     * @throws InvocationTargetException
     * @throws NoSuchMethodException
     */
    public SourceDocumentTransformer getTransformer(SourceDocument sourceDocument, RecordType targetType)
            throws InstantiationException, IllegalAccessException, InvocationTargetException, NoSuchMethodException {
        if (sourceDocument == null)
            throw new NullPointerException("SourceDocument cannot be null!");
        if (targetType == null)
            throw new NullPointerException("RecordType cannot be null!");
        Class<? extends SourceDocumentTransformer> transformerClass =
                serviceFactory.getCoreRegistry().getTransformerClass(sourceDocument.getType(), targetType);
        SourceDocumentTransformer transformer;
        transformer = transformerClass.getConstructor(ServiceFactory.class, SourceDocument.class, RecordType.class)
                .newInstance(serviceFactory, sourceDocument, targetType);
        return transformer;
    }

    /**
     * Retrieve all user data records associated with the submitted data item.
     * The DataRecord is a view that gives direct access to parsed out metadata
     * of the source document. A DataItem may contain 1 or several DataRecords.
     * These DataRecords are to be understood as a View of the UserDataItem with
     * no life cycle of their own.
     *
     * @param dataItem
     * @return DataRecords
     * @throws SQLException
     * @throws IOException
     * @throws ParseException
     */
    public GenericDataRecordCollection<IndexedDataRecord> extractDataRecords(UserDataItem dataItem)
            throws IOException, SQLException, ParseException {
        if (dataItem == null)
            throw new NullPointerException("UserDataItem cannot be null!");
        return serviceFactory.getConversionService().read(dataItem);
    }

    /**
     * Method extracts the DataRecords for each UserDataItem in the submitted
     * list. The returned map keys the extracted DataRecordCollection to the
     * UserDataItem it was extracted from.
     *
     * @param dataItems
     * @return dataRecordCollections
     */
    public Map<UserDataItem, GenericDataRecordCollection<IndexedDataRecord>> extractDataRecordCollections(
            List<UserDataItem> dataItems) {
        if (dataItems == null)
            throw new NullPointerException("UserDataItems cannot be null!");
        Map<UserDataItem, GenericDataRecordCollection<IndexedDataRecord>> dataRecordCollections =
                new HashMap<UserDataItem, GenericDataRecordCollection<IndexedDataRecord>>();
        for (UserDataItem dataItem : dataItems) {
            // Note to Hannes: I added the following try-catch block to allow processing
            // to continue even if a UserDataItem is encountered whose DataRecords cannot
            // be extracted. This will often be the case, since users can upload data
            // with Unknown record type. Please feel free to change this error handling
            // to whatever you feel is appropriate.
            // - Jeremy
            try {
                dataRecordCollections.put(dataItem, extractDataRecords(dataItem));
            } catch (Exception e) {
                if (log.isDebugEnabled())
                    log.debug("Error extracting DataRecordCollection from UserDataItem "
                            + dataItem.getUserDataId() + ": " + e.getMessage());
            }
        }
        return dataRecordCollections;
    }

    /**
     * Method extracts the individual record portion from the SourceDocument
     * associated with the submitted UserDataItem, corresponding to the
     * submitted index of the selected DataRecord.
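     * <p>
     * A small sketch that pairs this with {@link #extractDataRecords(UserDataItem)}
     * (the data item is a placeholder, e.g. taken from a Folder, and the
     * Workbench instance is assumed to be at hand):
     * <pre>{@code
     * UserDataItem dataItem = ...;
     * GenericDataRecordCollection<IndexedDataRecord> records = workbench.extractDataRecords(dataItem);
     * // Pull out the raw source text that backs the first DataRecord only.
     * SourceDocument firstRecordDocument = workbench.extractSubSourceDocument(dataItem, 0);
     * }</pre>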
     *
     * @param dataItem
     * @param dataRecordIndex
     * @return sourceDocument
     * @throws SQLException
     * @throws IOException
     */
    public SourceDocument extractSubSourceDocument(UserDataItem dataItem, int dataRecordIndex)
            throws IOException, SQLException {
        if (dataItem == null)
            throw new NullPointerException("UserDataItem cannot be null!");
        List<SourceDocument> sourceDocuments = splitSourceDocument(dataItem);
        return sourceDocuments.get(dataRecordIndex);
    }

    /**
     * Method extracts the individual record portion from the submitted
     * SourceDocument, corresponding to the submitted index of the selected
     * DataRecord.
     *
     * @param sourceDocument
     * @param dataRecordIndex
     * @return subSourceDocument
     * @throws SQLException
     * @throws IOException
     */
    public SourceDocument extractSubSourceDocument(SourceDocument sourceDocument, int dataRecordIndex)
            throws IOException, SQLException {
        if (sourceDocument == null)
            throw new NullPointerException("SourceDocument cannot be null!");
        List<SourceDocument> sourceDocuments = splitSourceDocument(sourceDocument);
        return sourceDocuments.get(dataRecordIndex);
    }

    /**
     * Method extracts the individual record portions from the SourceDocument
     * associated with the submitted UserDataItem, corresponding to the
     * submitted indices of the selected DataRecords.
     *
     * @param dataItem
     * @param dataRecordIndices
     * @return sourceDocuments
     * @throws SQLException
     * @throws IOException
     */
    public List<SourceDocument> extractSubSourceDocuments(UserDataItem dataItem, int[] dataRecordIndices)
            throws IOException, SQLException {
        if (dataItem == null)
            throw new NullPointerException("UserDataItem cannot be null!");
        List<SourceDocument> sourceDocuments = splitSourceDocument(dataItem);
        List<SourceDocument> filteredSourceDocuments = new ArrayList<SourceDocument>(dataRecordIndices.length);
        for (int index : dataRecordIndices)
            filteredSourceDocuments.add(sourceDocuments.get(index));
        return filteredSourceDocuments;
    }

    /**
     * Method separates all UserDataItems in the submitted folder into
     * RecordType-specific Lists and returns these Lists keyed to their
     * RecordType in a Map.
     *
     * @param folder
     * @return typeUserDataItemLists
     * @throws SQLException
     * @throws IOException
     */
    public Map<RecordType, List<UserDataItem>> sortDataItemsByRecordType(Folder folder)
            throws IOException, SQLException {
        CoreRegistry coreRegistry = serviceFactory.getCoreRegistry();
        Map<RecordType, List<UserDataItem>> typedLists = new HashMap<RecordType, List<UserDataItem>>();
        for (UserDataItem dataItem : folder.findDataItems()) {
            SourceDocumentType sdt = dataItem.getType();
            RecordType rt = coreRegistry.getRecordType(sdt.getEntityType(), sdt.getDataType());
            if (rt == null)
                rt = RecordType.UNKNOWN;
            if (typedLists.containsKey(rt) == false)
                typedLists.put(rt, new ArrayList<UserDataItem>());
            typedLists.get(rt).add(dataItem);
        }
        return typedLists;
    }

    /**
     * Method separates all UserDataItems in the submitted folder into
     * DataFormat-specific Lists and returns these Lists keyed to their
     * DataFormat in a Map.
     *
     * @param folder
     * @return typeUserDataItemLists
     * @throws SQLException
     * @throws IOException
     */
    public Map<DataFormat, List<UserDataItem>> sortDataItemsByDataFormat(Folder folder)
            throws IOException, SQLException {
        Map<DataFormat, List<UserDataItem>> typedLists = new HashMap<DataFormat, List<UserDataItem>>();
        for (UserDataItem dataItem : folder.findDataItems()) {
            DataFormat format = dataItem.getDataFormat();
            if (typedLists.containsKey(format) == false) {
                List<UserDataItem> newList = new ArrayList<UserDataItem>();
                newList.add(dataItem);
                typedLists.put(format, newList);
            } else
                typedLists.get(format).add(dataItem);
        }
        return typedLists;
    }

    /* TaskAgent Support Methods */

    /**
     * Start a new TaskRun for the submitted Task using the submitted inputData.
     * Since a Task can be shared via group memberships, a TaskRun might have a
     * different owner than the Task itself.
     * Thus the current userProfile must be submitted (the initial group is the
     * default group for this user).
     * <p>
     * This method will supervise the following steps:
     * <ol>
     * <li>look up of the required input DataFormat for the tool of the task</li>
     * <li>conversion of the inputData into a suitable SourceDocument</li>
     * <li>getting a filename for the input file</li>
     * <li>staging of the SourceDocument to the tool workspace</li>
     * <li>getting a filename for each parameter UserDataItem</li>
     * <li>staging of each parameter file to the tool workspace</li>
     * <li>render the commandline</li>
     * <li>launch a process executing the command</li>
     * <li>depending on the tool:
     * <ul>
     * <li>capture the process output directly</li>
     * <li>retrieve the content of the output file</li>
     * </ul>
     * </li>
     * <li>write the output into a new UserDataItem</li>
     * <li>set the output dataItem to the TaskRun output</li>
     * </ol>
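     * <p>
     * A minimal submission sketch (the task is a placeholder for a fully
     * configured, saved Task, the Workbench instance is assumed to be at hand,
     * and the blocking {@code get()} call is optional):
     * <pre>{@code
     * Task task = ...;
     * Future<Task> result = workbench.submitTask(task);
     * // Optionally wait for staging, execution and unstaging to finish.
     * Task finished = result.get();
     * }</pre>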
     * <p>
     * Depending on the speed of the process, the returned TaskRun instance will
     * either already contain the output or not. TaskRun staging, execution and
     * unstaging happen in a separate thread. You can specify in seconds how
     * long the submitting thread will wait for the result to come back.
     *
     * @param task
     * @return updated task instance
     * @throws SQLException
     * @throws IOException
     */
    public Future<Task> submitTask(Task task) throws WorkbenchException, IOException, SQLException {
        if (task == null)
            throw new NullPointerException("task");
        return (new TaskRunner(serviceFactory)).executeTask(task);
    }

    /**
     * Method allows you to submit a transient task. Before submission the task
     * is saved to the target folder. See submitTask(Task task).
     *
     * @param task
     * @param folder
     * @return saved task instance
     * @throws SQLException
     * @throws IOException
     */
    public Future<Task> saveAndSubmitTask(Task task, Folder folder)
            throws WorkbenchException, IOException, SQLException {
        if (task == null)
            throw new NullPointerException("task");
        task.setEnclosingFolder(folder);
        task.save();
        return (new TaskRunner(serviceFactory)).executeTask(task);
    }

    /**
     * Run already rendered commands synchronously. Does so by inserting a Task
     * that isn't associated with any user.
     *
     * @param toolName
     * @param command Rendered command line to be executed.
     * @param input Maps filenames (as they appear in command) to their contents
     *        as byte arrays. The data will be staged to the specified files
     *        before the command is executed.
     * @param outputFiles Maps parameter names to the names of the expected
     *        output files. Filenames may include wildcards. The contents of the
     *        files will be harvested to create the map that is returned.
     * @param allowMissingFiles if false an exception will be thrown if any of
     *        the expected outputFiles aren't found. If true, missing files don't
     *        generate an exception. THIS PARAMETER IS NOW IGNORED.
     *        BaseProcessWorker.storeOutputFiles() doesn't throw an exception
     *        when a file is missing ... missing is always allowed.
     * @return Map of output parameter name -> (map of filename -> file contents).
     *         Each output parameter may map to multiple filenames if wildcards
     *         were used in outputFiles.
     * @throws SQLException
     * @throws IOException
     * @throws ExecutionException
     * @throws InterruptedException
     */
    public Map<String, Map<String, byte[]>> runCommand(
            String toolName,
            String[] command,
            Map<String, byte[]> input,
            Map<String, String> outputFiles,
            boolean allowMissingFiles)
            throws IOException, SQLException, ExecutionException, InterruptedException {
        return (new CommandRunner(serviceFactory)).runCommand(toolName, command, input, outputFiles);
    }

    /* Semantic Annotation Methods */

    /**
     * Return all registered EntityTypes that are mapped to non-abstract
     * RecordTypes.
     *
     * @return Set
     */
    public Set<EntityType> getEntityTypes() {
        return serviceFactory.getCoreRegistry().getEntityTypes();
    }

    /**
     * Return all registered EntityTypes (such as PROTEIN, NUCLEIC_ACID,
     * COMPOUND, etc.)
     *
     * @return Set
     */
    public Set<EntityType> getAllEntityTypes() {
        return serviceFactory.getCoreRegistry().getAllEntityTypes();
    }

    /**
     * Return all registered DataTypes that are mapped to non-abstract
     * RecordTypes.
     *
     * @return Set
     */
    public Set<DataType> getDataTypes() {
        return serviceFactory.getCoreRegistry().getDataTypes();
    }

    /**
     * Return all registered DataTypes (SEQUENCE, STRUCTURE, SEQUENCE_ALIGNMENT
     * etc.)
     *
     * @return Set
     */
    public Set<DataType> getAllDataTypes() {
        return serviceFactory.getCoreRegistry().getAllDataTypes();
    }

    /**
     * Return all registered RecordTypes (such as PROTEIN_SEQUENCE,
     * NUCLEIC_ACID_SEQUENCE, COMPOUND_STRUCTURE, etc.)
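     * <p>
     * A sketch of how the type registry methods relate to each other (the
     * Workbench instance is assumed to be at hand):
     * <pre>{@code
     * for (RecordType recordType : workbench.getRecordTypes()) {
     *     EntityType entityType = workbench.getEntityType(recordType);
     *     DataType dataType = workbench.getDataType(recordType);
     *     // Look up the RecordType registered for this EntityType/DataType pair.
     *     RecordType registered = workbench.getRecordType(entityType, dataType);
     * }
     * }</pre>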
     *
     * @return Set
     */
    public Set<RecordType> getRecordTypes() {
        return serviceFactory.getCoreRegistry().getRecordTypes();
    }

    /**
     * Return all registered RecordTypes for the submitted EntityType (such as
     * PROTEIN_SEQUENCE, PROTEIN_SEQUENCE_ALIGNMENT, etc. for PROTEIN).
     *
     * @param entityType
     * @return Set
     */
    public Set<RecordType> getRecordTypes(EntityType entityType) {
        return serviceFactory.getCoreRegistry().getRecordTypes(entityType);
    }

    /**
     * Return all registered RecordTypes for the submitted DataType (such as
     * PROTEIN_SEQUENCE, NUCLEIC_ACID_SEQUENCE, etc. for SEQUENCE).
     *
     * @param dataType
     * @return Set
     */
    public Set<RecordType> getRecordTypes(DataType dataType) {
        return serviceFactory.getCoreRegistry().getRecordTypes(dataType);
    }

    /**
     * Returns the registered RecordType for the submitted EntityType and DataType.
     *
     * @param entityType
     * @param dataType
     * @return RecordType
     */
    public RecordType getRecordType(EntityType entityType, DataType dataType) {
        return serviceFactory.getCoreRegistry().getRecordType(entityType, dataType);
    }

    /**
     * Method returns the DataType of the submitted RecordType.
     *
     * @param recordType
     * @return dataType
     */
    public DataType getDataType(RecordType recordType) {
        return serviceFactory.getCoreRegistry().getDataType(recordType);
    }

    /**
     * Method returns the EntityType of the submitted RecordType.
     *
     * @param recordType
     * @return entityType
     */
    public EntityType getEntityType(RecordType recordType) {
        return serviceFactory.getCoreRegistry().getEntityType(recordType);
    }

    /**
     * Method returns all RecordFields for the submitted RecordType.
     *
     * @param recordType
     * @return recordFields
     */
    public Set<RecordFieldType> getRecordFields(RecordType recordType) {
        return serviceFactory.getCoreRegistry().getRecordFields(recordType);
    }

    /* ConversionService Methods */

    /**
     * Method splits a (potentially) multi-record SourceDocument into a List of
     * SourceDocuments, each corresponding to an individual DataRecord.
     *
     * @param sourceDocument
     * @return sourceDocumentList
     * @throws SQLException
     * @throws IOException
     */
    public List<SourceDocument> splitSourceDocument(SourceDocument sourceDocument)
            throws IOException, SQLException {
        if (sourceDocument == null)
            throw new NullPointerException("SourceDocument cannot be null!");
        ConversionService cs = serviceFactory.getConversionService();
        List<SourceDocument> splitSourceDocuments = new ArrayList<SourceDocument>();
        RecordFilter filter = cs.getRecordFilter(sourceDocument.getDataFormat());
        BufferedReader br = new BufferedReader(new StringReader(new String(sourceDocument.getData())));
        filter.setInput(br);
        SourceDocumentType sdt = sourceDocument.getType();
        while (filter.hasNext()) {
            SourceDocument sd = new SourceDocumentBean(sdt, filter.next().getBytes());
            splitSourceDocuments.add(sd);
        }
        filter.close();
        return splitSourceDocuments;
    }

    /**
     * Method returns all registered DataFormats that can be read into a
     * DataRecord. Be aware that some DataFormats encode different DataTypes
     * (e.g. Fasta-formatted data may represent a single Sequence, a collection
     * of Sequences, or an Alignment).
     * CAVE: registered does not imply readable
     *
     * @return dataFormats
     */
    public Set<DataFormat> getRegisteredDataFormats() {
        Set<SourceDocumentType> registeredTypes = serviceFactory
                .getConversionService().getConversionRegistry()
                .getRegisteredDocumentTypes();
        Set<DataFormat> dataFormats = new HashSet<DataFormat>();
        for (SourceDocumentType sourceDocumentType : registeredTypes)
            dataFormats.add(sourceDocumentType.getDataFormat());
        return dataFormats;
    }

    /**
     * Method returns all registered DataFormats for the submitted RecordType
     * that can be read into a DataRecord.
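     * <p>
     * A small sketch combining this with {@link #splitSourceDocument(SourceDocument)}
     * (the RecordType and SourceDocument are placeholders, and the Workbench
     * instance is assumed to be at hand):
     * <pre>{@code
     * Set<DataFormat> formats = workbench.getRegisteredDataFormats(someRecordType);
     * // Split a multi-record document into one SourceDocument per DataRecord.
     * List<SourceDocument> parts = workbench.splitSourceDocument(someSourceDocument);
     * }</pre>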
     * CAVE: registered does not imply readable
     *
     * @param recordType
     * @return dataFormats
     */
    public Set<DataFormat> getRegisteredDataFormats(RecordType recordType) {
        CoreRegistry coreRegistry = serviceFactory.getCoreRegistry();
        Set<SourceDocumentType> registeredTypes = serviceFactory
                .getConversionService().getConversionRegistry()
                .getRegisteredDocumentTypes();
        Set<DataFormat> dataFormats = new HashSet<DataFormat>();
        for (SourceDocumentType sourceDocumentType : registeredTypes) {
            RecordType rt = coreRegistry.getRecordType(sourceDocumentType.getEntityType(),
                    sourceDocumentType.getDataType());
            if (rt == null)
                continue;
            if (rt.equals(recordType))
                dataFormats.add(sourceDocumentType.getDataFormat());
        }
        return dataFormats;
    }

    /**
     * Parse a SourceDocument instance that contains one or more entries (e.g. a
     * single-sequence fasta file) and populate a DataRecordCollection.
     *
     * @param srcDocument
     * @return dataRecordCollection
     * @throws SQLException
     * @throws IOException
     * @throws ParseException
     */
    public GenericDataRecordCollection<IndexedDataRecord> read(SourceDocument srcDocument)
            throws IOException, SQLException, ParseException {
        return serviceFactory.getConversionService().read(srcDocument);
    }

    /**
     * This method will parse a SourceDocument and reassemble the data into the
     * submitted target SourceDocumentType. This conversion will only lead to a
     * successful result if the information content of the source format <= that
     * of the target format.
     *
     * @param srcDocument
     * @param targetKey
     * @return sourceDocument
     * @throws SQLException
     * @throws IOException
     * @throws ParseException
     */
    public SourceDocument convert(SourceDocument srcDocument, SourceDocumentType targetKey)
            throws IOException, SQLException, ParseException {
        return serviceFactory.getConversionService().convert(srcDocument, targetKey);
    }

    /**
     * This method will parse all SourceDocuments of the submitted input
     * collection and reassemble all of them into one new SourceDocument of the
     * submitted target SourceDocumentType. This conversion will only lead to a
     * successful result if the information content of the source format <= that
     * of the target format.
     *
     * @param srcDocuments
     * @param targetKey
     * @return sourceDocument
     * @throws SQLException
     * @throws IOException
     * @throws ParseException
     */
    public SourceDocument convert(Collection<SourceDocument> srcDocuments, SourceDocumentType targetKey)
            throws IOException, SQLException, ParseException {
        return serviceFactory.getConversionService().convert(srcDocuments, targetKey);
    }

    /**
     * Method returns all target SemanticKeys that the submitted source
     * DataFormat can be converted into.
     *
     * @param sourceDocumentType
     * @return targetSemanticKeys
     */
    public Set<SourceDocumentType> getSourceDocumentTypes(SourceDocumentType sourceDocumentType) {
        return serviceFactory.getConversionService().getTargetSourceDocumentTypes(sourceDocumentType);
    }

    /**
     * Method checks whether there is a SourceDocumentReader registered in the
     * ConversionService for the submitted SourceDocumentType.
     *
     * @param sourceDocumentType
     * @return canRead
     */
    public boolean canRead(SourceDocumentType sourceDocumentType) {
        return serviceFactory.getConversionService().canRead(sourceDocumentType);
    }

    /**
     * Method checks the submitted SourceDocument as to whether the data are
     * indeed formatted in the declared SourceDocumentType. It will also set the
     * validated flag of the sourceDocument to true if the check is successful.
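     * <p>
     * A sketch of the validate-then-convert pattern, using only methods of this
     * class (the document, target type and Workbench instance are placeholders):
     * <pre>{@code
     * if (workbench.validate(srcDocument)
     *         && workbench.canConvert(srcDocument.getType(), targetType)) {
     *     SourceDocument converted = workbench.convert(srcDocument, targetType);
     * }
     * }</pre>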
     *
     * @param srcDocument
     * @return isValid
     * @throws SQLException
     * @throws IOException
     */
    public boolean validate(SourceDocument srcDocument) throws IOException, SQLException {
        return serviceFactory.getConversionService().validate(srcDocument);
    }

    /**
     * Method checks whether there is a converter registered in the
     * ConversionService that can convert the submitted source
     * SourceDocumentType into the target format.
     *
     * @param sourceKey
     * @param targetKey
     * @return true if the SourceDocument can be converted into the target
     *         format.
     */
    public boolean canConvert(SourceDocumentType sourceKey, SourceDocumentType targetKey) {
        return serviceFactory.getConversionService().canConvert(sourceKey, targetKey);
    }

    /* ************ DatasetService Methods ********** */

    /**
     * Method returns all RecordTypes that have at least one Dataset registered.
     *
     * @return recordTypes
     */
    public Set<RecordType> getSearchableRecordTypes() {
        return serviceFactory.getDatasetService().getSearchableRecordTypes();
    }

    /**
     * Method returns all EntityTypes that have at least one Dataset registered.
     *
     * @return entityTypes
     */
    public Set<EntityType> getSearchableEntityTypes() {
        return serviceFactory.getDatasetService().getSearchableEntityTypes();
    }

    /**
     * Method returns all DataTypes that have at least one Dataset registered.
     *
     * @return dataTypes
     */
    public Set<DataType> getSearchableDataTypes() {
        return serviceFactory.getDatasetService().getSearchableDataTypes();
    }

    /**
     * Method returns all registered Datasets.
     *
     * @return datasets
     */
    public Set<Dataset> getDatasets() {
        return serviceFactory.getDatasetService().getDatasets();
    }

    /**
     * Method returns all registered Datasets for the submitted DataType and
     * EntityType.
     *
     * @param entityType
     * @param dataType
     * @return datasets
     */
    public Set<Dataset> getDatasets(EntityType entityType, DataType dataType) {
        return serviceFactory.getDatasetService().getDatasets(entityType, dataType);
    }

    /**
     * Method returns all registered Datasets for the submitted RecordType.
     *
     * @param recordType
     * @return datasets
     */
    public Set<Dataset> getDatasets(RecordType recordType) {
        EntityType entityType = serviceFactory.getCoreRegistry().getEntityType(recordType);
        DataType dataType = serviceFactory.getCoreRegistry().getDataType(recordType);
        return serviceFactory.getDatasetService().getDatasets(entityType, dataType);
    }

    /**
     * Method returns the SourceDocumentType for the submitted Dataset.
     *
     * @param dataset
     * @return sourceDocumentType
     */
    public SourceDocumentType getSourceDocumentType(Dataset dataset) {
        return serviceFactory.getDatasetService().getSourceDocumentType(dataset);
    }

    /**
     * Method returns the RecordType for the submitted Dataset.
     *
     * @param dataset
     * @return recordType
     */
    public RecordType getRecordType(Dataset dataset) {
        return serviceFactory.getDatasetService().getRecordType(dataset);
    }

    /* Query Methods */

    /**
     * Get a SimpleSearchMetaQuery for the submitted Dataset. A
     * SimpleSearchMetaQuery can be used multiple times and executed with a
     * different searchPhrase each time.
     *
     * @param dataset
     * @return SimpleSearchMetaQuery for the dataset parameter
     */
    public SimpleSearchMetaQuery getSimpleSearchQuery(Dataset dataset) {
        return serviceFactory.getDatasetService().getSimpleSearchQuery(dataset);
    }

    /**
     * Get a SimpleSearchMetaQuery for the submitted Datasets. A
     * SimpleSearchMetaQuery can be used multiple times and executed with a
     * different searchPhrase each time.
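     * <p>
     * A sketch of a simple cross-dataset search (the EntityType/DataType pair
     * and the Workbench instance are placeholders, and the actual execution
     * call depends on the SimpleSearchMetaQuery API, so it is only indicated as
     * a comment):
     * <pre>{@code
     * Set<Dataset> datasets = workbench.getDatasets(someEntityType, someDataType);
     * SimpleSearchMetaQuery query = workbench.getSimpleSearchQuery(datasets);
     * // ... execute the query with a search phrase, possibly more than once ...
     * }</pre>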
     *
     * @param datasets
     * @return SimpleSearchMetaQuery for the datasets parameter
     */
    public SimpleSearchMetaQuery getSimpleSearchQuery(Set<Dataset> datasets) {
        return serviceFactory.getDatasetService().getSimpleSearchQuery(datasets);
    }

    /* ************ ToolService Methods ********** */

    /**
     * Return all registered Tools.
     *
     * @return Set
     */
    public Set<String> getToolIds() {
        return serviceFactory.getToolRegistry().getToolIds();
    }

    /* User and UserData Administrative Methods */

    /**
     * Method allows a client to register a new user account. The method checks
     * that the submitted fields do not violate any constraints in the user
     * database.
     *
     * @param user
     * @return ValidationResult
     * @throws SQLException
     * @throws IOException
     */
    public ValidationResult registerNewUser(User user) throws IOException, SQLException {
        ValidationResult result = new ValidationResult();
        if (User.findUserByUsername(user.getUsername()) != null) {
            result.addError("Username " + user.getUsername() + " is not available!");
            return result;
        }
        if (User.findUserByEmail(user.getEmail()) != null) {
            result.addError("A user with this email: " + user.getEmail() + " already exists!");
            return result;
        }
        user.save();
        return result;
    }

    /**
     * Method allows a client to fully register a guest user account. The method
     * checks that the submitted fields do not violate any constraints in the
     * user database. The method will typically simply update the existing guest
     * user account.
     *
     * @param user
     * @return ValidationResult
     * @throws SQLException
     * @throws IOException
     */
    public ValidationResult registerGuestUser(User user) throws IOException, SQLException {
        return registerNewUser(user);
    }

    /**
     * Method allows a client to update user information. The method checks that
     * the updated fields do not violate any constraints in the user database.
     *
     * @param user
     * @return ValidationResult
     * @throws SQLException
     * @throws IOException
     */
    public ValidationResult updateUser(User user) throws IOException, SQLException {
        user.save();
        return new ValidationResult();
    }

    /**
     * Method allows an administrator to reset a user password without knowing
     * the existing password.
     *
     * @param username
     * @param newPassword
     * @throws SQLException
     * @throws IOException
     */
    public void resetPasswordAdmin(String username, String newPassword) throws IOException, SQLException {
        User user = User.findUserByUsername(username);
        if (user == null)
            throw new WorkbenchException("User does not exist!");
        user.setPassword(newPassword);
        user.save();
    }

    /**
     * Method saves the submitted group and assigns the submitted user as group
     * administrator.
     *
     * @param group
     * @param administrator
     * @return group
     * @throws SQLException
     * @throws IOException
     */
    public Group saveNewGroup(Group group, User administrator) throws IOException, SQLException {
        group.setAdministrator(administrator);
        group.save();
        return group;
    }

    // Factory methods

    /**
     * Method returns a new transient User instance.
     *
     * @return user
     */
    public User getNewUserInstance() {
        return new User();
    }

    /**
     * Method returns a new transient Group instance.
     *
     * @return group
     */
    public Group getNewGroupInstance() {
        return new Group();
    }
}