Upgraded Hibernate
author Kai Moritz <kai@juplo.de>
Sun, 6 Nov 2016 11:05:05 +0000 (12:05 +0100)
committer Kai Moritz <kai@juplo.de>
Sun, 13 Nov 2016 16:37:15 +0000 (17:37 +0100)
 * hibernate-core 5.0.2.Final -> 5.2.4.Final
 * hibernate-validator 5.2.2.Final -> 5.3.1.Final

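The bulk of the Java changes below deal with the schema-tooling API that was reworked between Hibernate 5.0 and 5.2: the old SchemaExport-based code in CreateMojo and DropMojo is replaced by the SchemaManagementTool SPI, and AbstractSchemaMojo now prepares the ExecutionOptions and TargetDescriptor that both mojos share. What follows is only a condensed sketch of that flow, pieced together from the hunks below (imports as added in AbstractSchemaMojo.java); it assumes a built MetadataImplementor named metadata, a writable File named output and a boolean export flag are already in scope:

    ServiceRegistry serviceRegistry =
        metadata.getMetadataBuildingOptions().getServiceRegistry();

    // Copy the settings of the ConfigurationService into a mutable map
    final Map settings = new HashMap();
    settings.putAll(
        serviceRegistry.getService(ConfigurationService.class).getSettings());

    // Collect exceptions during generation instead of failing on the first one
    ExceptionHandlerCollectingImpl handler = new ExceptionHandlerCollectingImpl();
    ExecutionOptions options =
        SchemaManagementToolCoordinator.buildExecutionOptions(settings, handler);

    // Always write the script; additionally hit the database when export is set
    final EnumSet<TargetType> targetTypes = EnumSet.of(TargetType.SCRIPT);
    if (export)
      targetTypes.add(TargetType.DATABASE);

    TargetDescriptor target = new TargetDescriptor()
    {
      @Override
      public EnumSet<TargetType> getTargetTypes() { return targetTypes; }

      @Override
      public ScriptTargetOutput getScriptTargetOutput()
      {
        String charset = (String) settings.get(AvailableSettings.HBM2DDL_CHARSET_NAME);
        return new ScriptTargetOutputToFile(output, charset);
      }
    };

    // The schema is generated from the annotated classes, not from a script
    SourceDescriptor source = new SourceDescriptor()
    {
      @Override
      public SourceType getSourceType() { return SourceType.METADATA; }

      @Override
      public ScriptSourceInput getScriptSourceInput() { return null; }
    };

    SchemaManagementTool tool =
        serviceRegistry.getService(SchemaManagementTool.class);
    Map config = options.getConfigurationValues();
    tool.getSchemaCreator(config).doCreation(metadata, options, source, target);
    // ...or, in the drop-mojo:
    // tool.getSchemaDropper(config).doDrop(metadata, options, source, target);
    // afterwards, handler.getExceptions() holds everything that went wrong
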
19 files changed:
pom.xml
src/it/h4mp-mod/schema.sql
src/it/hib-test/h2.sql
src/it/hib-test/oracle.sql
src/it/hib-test/postgres.sql
src/it/hibernate4-maven-plugin-envers-sample/create-tables-hsqldb.sql
src/it/hibernate4-maven-plugin-envers-sample/drop-tables-hsqldb.sql
src/it/ignored-dependency/schema.sql
src/it/properties/h2.sql
src/it/schemaexport-example/schema.sql
src/it/tutorials/entitymanager/pom.xml
src/it/tutorials/envers/pom.xml
src/it/tutorials/osgi/managed-jpa/pom.xml
src/it/tutorials/schema-osgi-managed-jpa.sql
src/it/tutorials/schema-osgi-unmanaged-jpa.sql
src/it/tutorials/schema-osgi-unmanaged-native.sql
src/main/java/de/juplo/plugins/hibernate/AbstractSchemaMojo.java
src/main/java/de/juplo/plugins/hibernate/CreateMojo.java
src/main/java/de/juplo/plugins/hibernate/DropMojo.java

diff --git a/pom.xml b/pom.xml
index 11f9e54..eaaa525 100644
--- a/pom.xml
+++ b/pom.xml
     <!-- Character set -->
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
     <!-- Versions used -->
-    <hibernate.version>5.0.2.Final</hibernate.version>
-    <hibernate-validator.version>5.2.2.Final</hibernate-validator.version>
+    <hibernate.version>5.2.4.Final</hibernate.version>
+    <hibernate-validator.version>5.3.1.Final</hibernate-validator.version>
     <el-api.version>3.0.0</el-api.version>
     <maven.version>3.3.3</maven.version>
     <maven-plugin-log4j.version>1.0.1</maven-plugin-log4j.version>
diff --git a/src/it/h4mp-mod/schema.sql b/src/it/h4mp-mod/schema.sql
index 9376dd7..6b95ee2 100644
--- a/src/it/h4mp-mod/schema.sql
+++ b/src/it/h4mp-mod/schema.sql
@@ -1,5 +1,4 @@
-
-    create sequence hibernate_sequence start 1 increment 1;
+create sequence hibernate_sequence start 1 increment 1;
 
     create table MainEntity (
         id int8 not null,
diff --git a/src/it/hib-test/h2.sql b/src/it/hib-test/h2.sql
index f4fff24..fd15b3f 100644
--- a/src/it/hib-test/h2.sql
+++ b/src/it/hib-test/h2.sql
@@ -7,5 +7,4 @@
         sources varchar(255),
         primary key (uuid)
     );
-
-    create index idx_test_simple_tuple on test_simple (sources, uuid);
+create index idx_test_simple_tuple on test_simple (sources, uuid);
diff --git a/src/it/hib-test/oracle.sql b/src/it/hib-test/oracle.sql
index 15df941..38caf32 100644
--- a/src/it/hib-test/oracle.sql
+++ b/src/it/hib-test/oracle.sql
@@ -7,5 +7,4 @@
         sources varchar2(255 char),
         primary key (uuid)
     );
-
-    create index idx_test_simple_tuple on test_simple (sources, uuid);
+create index idx_test_simple_tuple on test_simple (sources, uuid);
diff --git a/src/it/hib-test/postgres.sql b/src/it/hib-test/postgres.sql
index 99bdf22..7624731 100644
--- a/src/it/hib-test/postgres.sql
+++ b/src/it/hib-test/postgres.sql
@@ -7,5 +7,4 @@
         sources varchar(255),
         primary key (uuid)
     );
-
-    create index idx_test_simple_tuple on test_simple (sources, uuid);
+create index idx_test_simple_tuple on test_simple (sources, uuid);
diff --git a/src/it/hibernate4-maven-plugin-envers-sample/create-tables-hsqldb.sql b/src/it/hibernate4-maven-plugin-envers-sample/create-tables-hsqldb.sql
index 180166f..eee7061 100644
--- a/src/it/hibernate4-maven-plugin-envers-sample/create-tables-hsqldb.sql
+++ b/src/it/hibernate4-maven-plugin-envers-sample/create-tables-hsqldb.sql
@@ -1,7 +1,5 @@
-
-    create sequence TTL_AUDIT_REVISION_SEQ start with 1 increment by 10;
-
-    create sequence TTL_EVENT_SEQ start with 1 increment by 10;
+create sequence TTL_AUDIT_REVISION_SEQ start with 1 increment by 10;
+create sequence TTL_EVENT_SEQ start with 1 increment by 10;
 
     create table TTL_AUDIT_REVISION (
         ID bigint not null,
diff --git a/src/it/hibernate4-maven-plugin-envers-sample/drop-tables-hsqldb.sql b/src/it/hibernate4-maven-plugin-envers-sample/drop-tables-hsqldb.sql
index d123767..b1387e8 100644
--- a/src/it/hibernate4-maven-plugin-envers-sample/drop-tables-hsqldb.sql
+++ b/src/it/hibernate4-maven-plugin-envers-sample/drop-tables-hsqldb.sql
@@ -8,6 +8,6 @@
 
     drop table TTL_EVENT_AUD if exists;
 
-    drop sequence TTL_AUDIT_REVISION_SEQ;
+    drop sequence TTL_AUDIT_REVISION_SEQ if exists;
 
-    drop sequence TTL_EVENT_SEQ;
+    drop sequence TTL_EVENT_SEQ if exists;
diff --git a/src/it/ignored-dependency/schema.sql b/src/it/ignored-dependency/schema.sql
index d2a1838..9b3970f 100644
--- a/src/it/ignored-dependency/schema.sql
+++ b/src/it/ignored-dependency/schema.sql
@@ -1,5 +1,4 @@
-
-    create sequence hibernate_sequence start 1 increment 1;
+create sequence hibernate_sequence start 1 increment 1;
 
     create table MainEntity (
         id int8 not null,
diff --git a/src/it/properties/h2.sql b/src/it/properties/h2.sql
index 10d0728..7f75d95 100644
--- a/src/it/properties/h2.sql
+++ b/src/it/properties/h2.sql
@@ -6,5 +6,4 @@
         sources varchar(255),
         primary key (uuid)
     );
-
-    create index idx_test_simple_tuple on test_simple (sources, uuid);
+create index idx_test_simple_tuple on test_simple (sources, uuid);
diff --git a/src/it/schemaexport-example/schema.sql b/src/it/schemaexport-example/schema.sql
index 5c57479..f1729b9 100644
--- a/src/it/schemaexport-example/schema.sql
+++ b/src/it/schemaexport-example/schema.sql
@@ -1,5 +1,4 @@
-
-    create sequence hibernate_sequence start with 1 increment by 1;
+create sequence hibernate_sequence start with 1 increment by 1;
 
     create table ABTEILUNG (
         OID bigint not null,
diff --git a/src/it/tutorials/entitymanager/pom.xml b/src/it/tutorials/entitymanager/pom.xml
index 9d740a5..a68d790 100644
--- a/src/it/tutorials/entitymanager/pom.xml
+++ b/src/it/tutorials/entitymanager/pom.xml
@@ -69,6 +69,8 @@
                     <scanTestClasses>true</scanTestClasses>
                     <dialect>org.hibernate.dialect.H2Dialect</dialect>
                     <format>true</format>
+                    <username>sa</username>
+                    <password></password>
                 </configuration>
             </plugin>
         </plugins>
diff --git a/src/it/tutorials/envers/pom.xml b/src/it/tutorials/envers/pom.xml
index f5d8769..7aaac61 100644
--- a/src/it/tutorials/envers/pom.xml
+++ b/src/it/tutorials/envers/pom.xml
@@ -74,6 +74,8 @@
                     <scanTestClasses>true</scanTestClasses>
                     <dialect>org.hibernate.dialect.H2Dialect</dialect>
                     <format>true</format>
+                    <username>sa</username>
+                    <password></password>
                 </configuration>
             </plugin>
         </plugins>
diff --git a/src/it/tutorials/osgi/managed-jpa/pom.xml b/src/it/tutorials/osgi/managed-jpa/pom.xml
index e26e99e..6aa0cc3 100644
--- a/src/it/tutorials/osgi/managed-jpa/pom.xml
+++ b/src/it/tutorials/osgi/managed-jpa/pom.xml
@@ -82,6 +82,7 @@
                                <configuration>
                                        <url>jdbc:h2:mem:db1;MVCC=TRUE</url>
                                        <format>true</format>
+                                       <export>false</export>
                                </configuration>
                        </plugin>
                </plugins>
diff --git a/src/it/tutorials/schema-osgi-managed-jpa.sql b/src/it/tutorials/schema-osgi-managed-jpa.sql
index e69de29..de92e46 100644
--- a/src/it/tutorials/schema-osgi-managed-jpa.sql
+++ b/src/it/tutorials/schema-osgi-managed-jpa.sql
@@ -0,0 +1,7 @@
+create sequence hibernate_sequence start with 1 increment by 1;
+
+    create table DataPoint (
+        id bigint not null,
+        name varchar(255),
+        primary key (id)
+    );
diff --git a/src/it/tutorials/schema-osgi-unmanaged-jpa.sql b/src/it/tutorials/schema-osgi-unmanaged-jpa.sql
index e79470a..de92e46 100644
--- a/src/it/tutorials/schema-osgi-unmanaged-jpa.sql
+++ b/src/it/tutorials/schema-osgi-unmanaged-jpa.sql
@@ -1,5 +1,4 @@
-
-    create sequence hibernate_sequence start with 1 increment by 1;
+create sequence hibernate_sequence start with 1 increment by 1;
 
     create table DataPoint (
         id bigint not null,
diff --git a/src/it/tutorials/schema-osgi-unmanaged-native.sql b/src/it/tutorials/schema-osgi-unmanaged-native.sql
index 9f02ba4..d89e280 100644
--- a/src/it/tutorials/schema-osgi-unmanaged-native.sql
+++ b/src/it/tutorials/schema-osgi-unmanaged-native.sql
@@ -1,5 +1,4 @@
-
-    create sequence hibernate_sequence start with 1 increment by 1;
+create sequence hibernate_sequence start with 1 increment by 1;
 
     create table DataPoint (
         id bigint not null,
diff --git a/src/main/java/de/juplo/plugins/hibernate/AbstractSchemaMojo.java b/src/main/java/de/juplo/plugins/hibernate/AbstractSchemaMojo.java
index e121f2a..ff68ea7 100644
--- a/src/main/java/de/juplo/plugins/hibernate/AbstractSchemaMojo.java
+++ b/src/main/java/de/juplo/plugins/hibernate/AbstractSchemaMojo.java
@@ -10,6 +10,8 @@ import java.net.MalformedURLException;
 import java.net.URL;
 import java.security.NoSuchAlgorithmException;
 import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.LinkedHashSet;
@@ -45,25 +47,34 @@ import org.hibernate.boot.registry.classloading.spi.ClassLoaderService;
 import org.hibernate.boot.registry.classloading.spi.ClassLoadingException;
 import org.hibernate.boot.registry.selector.spi.StrategySelector;
 import org.hibernate.boot.spi.MetadataImplementor;
+import org.hibernate.cfg.AvailableSettings;
 import static org.hibernate.cfg.AvailableSettings.DIALECT;
 import static org.hibernate.cfg.AvailableSettings.DRIVER;
 import static org.hibernate.cfg.AvailableSettings.FORMAT_SQL;
+import static org.hibernate.cfg.AvailableSettings.HBM2DDL_DELIMITER;
 import static org.hibernate.cfg.AvailableSettings.HBM2DLL_CREATE_NAMESPACES;
 import static org.hibernate.cfg.AvailableSettings.IMPLICIT_NAMING_STRATEGY;
+import static org.hibernate.cfg.AvailableSettings.JPA_JDBC_DRIVER;
+import static org.hibernate.cfg.AvailableSettings.JPA_JDBC_PASSWORD;
+import static org.hibernate.cfg.AvailableSettings.JPA_JDBC_URL;
+import static org.hibernate.cfg.AvailableSettings.JPA_JDBC_USER;
 import static org.hibernate.cfg.AvailableSettings.PASS;
 import static org.hibernate.cfg.AvailableSettings.PHYSICAL_NAMING_STRATEGY;
 import static org.hibernate.cfg.AvailableSettings.SHOW_SQL;
 import static org.hibernate.cfg.AvailableSettings.USER;
 import static org.hibernate.cfg.AvailableSettings.URL;
+import org.hibernate.engine.config.spi.ConfigurationService;
 import org.hibernate.engine.jdbc.connections.spi.ConnectionProvider;
 import org.hibernate.internal.util.config.ConfigurationException;
-import static org.hibernate.jpa.AvailableSettings.JDBC_DRIVER;
-import static org.hibernate.jpa.AvailableSettings.JDBC_PASSWORD;
-import static org.hibernate.jpa.AvailableSettings.JDBC_URL;
-import static org.hibernate.jpa.AvailableSettings.JDBC_USER;
 import org.hibernate.jpa.boot.internal.ParsedPersistenceXmlDescriptor;
 import org.hibernate.jpa.boot.internal.PersistenceXmlParser;
-import org.hibernate.jpa.boot.spi.ProviderChecker;
+import org.hibernate.tool.schema.TargetType;
+import org.hibernate.tool.schema.internal.ExceptionHandlerCollectingImpl;
+import org.hibernate.tool.schema.internal.exec.ScriptTargetOutputToFile;
+import org.hibernate.tool.schema.spi.ExecutionOptions;
+import org.hibernate.tool.schema.spi.SchemaManagementToolCoordinator;
+import org.hibernate.tool.schema.spi.ScriptTargetOutput;
+import org.hibernate.tool.schema.spi.TargetDescriptor;
 import org.scannotation.AnnotationDB;
 
 
@@ -77,7 +88,6 @@ import org.scannotation.AnnotationDB;
 public abstract class AbstractSchemaMojo extends AbstractMojo
 {
   public final static String EXPORT = "hibernate.schema.export";
-  public final static String DELIMITER = "hibernate.schema.delimiter";
   public final static String OUTPUTDIRECTORY = "project.build.outputDirectory";
   public final static String SCAN_CLASSES = "hibernate.schema.scan.classes";
   public final static String SCAN_DEPENDENCIES = "hibernate.schema.scan.dependencies";
@@ -89,6 +99,7 @@ public abstract class AbstractSchemaMojo extends AbstractMojo
 
   private final Set<String> packages = new HashSet<String>();
 
+
   /**
    * The maven project.
    * <p>
@@ -109,7 +120,7 @@ public abstract class AbstractSchemaMojo extends AbstractMojo
    * @required
    * @readonly
    */
-  String buildDirectory;
+  private String buildDirectory;
 
 
   /** Parameters to configure the generation of the SQL *********************/
@@ -128,7 +139,7 @@ public abstract class AbstractSchemaMojo extends AbstractMojo
    * @parameter property="hibernate.schema.export" default-value="true"
    * @since 2.0
    */
-  Boolean export;
+  private Boolean export;
 
   /**
    * Skip execution
@@ -188,10 +199,10 @@ public abstract class AbstractSchemaMojo extends AbstractMojo
    * it is not known by Hibernate nor JPA and, hence, not picked up from
    * their configuration!
    *
-   * @parameter property="hibernate.schema.delimiter" default-value=";"
+   * @parameter property="hibernate.hbm2ddl.delimiter" default-value=";"
    * @since 1.0
    */
-  String delimiter;
+  private String delimiter;
 
   /**
    * Show the generated SQL in the command-line output.
@@ -199,7 +210,7 @@ public abstract class AbstractSchemaMojo extends AbstractMojo
    * @parameter property="hibernate.show_sql"
    * @since 1.0
    */
-  Boolean show;
+  private Boolean show;
 
   /**
    * Format output-file.
@@ -207,7 +218,7 @@ public abstract class AbstractSchemaMojo extends AbstractMojo
    * @parameter property="hibernate.format_sql"
    * @since 1.0
    */
-  Boolean format;
+  private Boolean format;
 
   /**
    * Specifies whether to automatically create also the database schema/catalog.
@@ -215,7 +226,7 @@ public abstract class AbstractSchemaMojo extends AbstractMojo
    * @parameter property="hibernate.hbm2dll.create_namespaces" default-value="false"
    * @since 2.0
    */
-  Boolean createNamespaces;
+  private Boolean createNamespaces;
 
   /**
    * Implicit naming strategy
@@ -445,7 +456,7 @@ public abstract class AbstractSchemaMojo extends AbstractMojo
       throw new MojoFailureException("Digest-Algorithm MD5 is missing!", e);
     }
 
-    SimpleConnectionProvider connectionProvider =
+    final SimpleConnectionProvider connectionProvider =
         new SimpleConnectionProvider(getLog());
 
     try
@@ -489,13 +500,16 @@ public abstract class AbstractSchemaMojo extends AbstractMojo
       else
         getLog().debug("Configuration unchanged.");
 
+      /** Check, that the outputfile is writable */
+      final File output = getOutputFile(filename);
+
       /** Configure Hibernate */
-      StandardServiceRegistry serviceRegistry =
+      final StandardServiceRegistry serviceRegistry =
           new StandardServiceRegistryBuilder(bootstrapServiceRegitry)
               .applySettings(properties)
               .addService(ConnectionProvider.class, connectionProvider)
               .build();
-      MetadataSources sources = new MetadataSources(serviceRegistry);
+      final MetadataSources sources = new MetadataSources(serviceRegistry);
 
       /** Add the remaining class-path-elements */
       completeClassPath(classLoader);
@@ -654,6 +668,42 @@ public abstract class AbstractSchemaMojo extends AbstractMojo
             );
       }
 
+      /** Prepare the generation of the SQL */
+      Map settings = new HashMap();
+      settings.putAll(
+          serviceRegistry
+              .getService(ConfigurationService.class)
+              .getSettings()
+              );
+      ExceptionHandlerCollectingImpl handler =
+          new ExceptionHandlerCollectingImpl();
+      ExecutionOptions options =
+          SchemaManagementToolCoordinator
+              .buildExecutionOptions(settings, handler);
+      final EnumSet<TargetType> targetTypes = EnumSet.of(TargetType.SCRIPT);
+      if (export)
+        targetTypes.add(TargetType.DATABASE);
+      TargetDescriptor target = new TargetDescriptor()
+      {
+        @Override
+        public EnumSet<TargetType> getTargetTypes()
+        {
+          return targetTypes;
+        }
+
+        @Override
+        public ScriptTargetOutput getScriptTargetOutput()
+        {
+          String charset =
+              (String)
+              serviceRegistry
+                  .getService(ConfigurationService.class)
+                  .getSettings()
+                  .get(AvailableSettings.HBM2DDL_CHARSET_NAME);
+          return new ScriptTargetOutputToFile(output, charset);
+        }
+      };
+
       /**
        * Change class-loader of current thread.
        * This is necessary, because still not all parts of Hibernate 5 use
@@ -664,11 +714,13 @@ public abstract class AbstractSchemaMojo extends AbstractMojo
       try
       {
         thread.setContextClassLoader(classLoader);
-        build((MetadataImplementor)metadataBuilder.build());
+        build((MetadataImplementor)metadataBuilder.build(), options, target);
       }
       finally
       {
         thread.setContextClassLoader(contextClassLoader);
+        for (Exception e : handler.getExceptions())
+          getLog().error(e.getMessage());
       }
     }
     catch (MojoExecutionException e)
@@ -700,7 +752,11 @@ public abstract class AbstractSchemaMojo extends AbstractMojo
   }
 
 
-  abstract void build(MetadataImplementor metadata)
+  abstract void build(
+      MetadataImplementor metadata,
+      ExecutionOptions options,
+      TargetDescriptor target
+      )
     throws
       MojoFailureException,
       MojoExecutionException;
@@ -856,12 +912,12 @@ public abstract class AbstractSchemaMojo extends AbstractMojo
      * Overwrite values from properties-file if the configuration parameter is
      * known to Hibernate.
      */
-    dialect = configure(properties, dialect, DIALECT);
-    tracker.track(DELIMITER, delimiter); // << not reflected in hibernate configuration!
-    format = configure(properties, format, FORMAT_SQL);
-    createNamespaces = configure(properties, createNamespaces, HBM2DLL_CREATE_NAMESPACES);
-    implicitNamingStrategy = configure(properties, implicitNamingStrategy, IMPLICIT_NAMING_STRATEGY);
-    physicalNamingStrategy = configure(properties, physicalNamingStrategy, PHYSICAL_NAMING_STRATEGY);
+    configure(properties, dialect, DIALECT);
+    configure(properties, delimiter, HBM2DDL_DELIMITER);
+    configure(properties, format, FORMAT_SQL);
+    configure(properties, createNamespaces, HBM2DLL_CREATE_NAMESPACES);
+    configure(properties, implicitNamingStrategy, IMPLICIT_NAMING_STRATEGY);
+    configure(properties, physicalNamingStrategy, PHYSICAL_NAMING_STRATEGY);
     tracker.track(OUTPUTDIRECTORY, outputDirectory); // << not reflected in hibernate configuration!
     tracker.track(SCAN_DEPENDENCIES, scanDependencies); // << not reflected in hibernate configuration!
     tracker.track(SCAN_TESTCLASSES, scanTestClasses.toString()); // << not reflected in hibernate configuration!
@@ -881,10 +937,10 @@ public abstract class AbstractSchemaMojo extends AbstractMojo
      * Configure the connection parameters.
      * Overwrite values from properties-file.
      */
-    driver = configure(properties, driver, DRIVER, JDBC_DRIVER);
-    url = configure(properties, url, URL, JDBC_URL);
-    username = configure(properties, username, USER, JDBC_USER);
-    password = configure(properties, password, PASS, JDBC_PASSWORD);
+    configure(properties, driver, DRIVER, JPA_JDBC_DRIVER);
+    configure(properties, url, URL, JPA_JDBC_URL);
+    configure(properties, username, USER, JPA_JDBC_USER);
+    configure(properties, password, PASS, JPA_JDBC_PASSWORD);
 
     if (properties.isEmpty())
     {
@@ -897,7 +953,7 @@ public abstract class AbstractSchemaMojo extends AbstractMojo
       getLog().info("  " + entry.getKey() + " = " + entry.getValue());
   }
 
-  private String configure(
+  private void configure(
       Properties properties,
       String value,
       String key,
@@ -906,7 +962,7 @@ public abstract class AbstractSchemaMojo extends AbstractMojo
   {
     value = configure(properties, value, key);
     if (value == null)
-      return properties.getProperty(alternativeKey);
+      return;
 
     if (properties.containsKey(alternativeKey))
     {
@@ -917,7 +973,6 @@ public abstract class AbstractSchemaMojo extends AbstractMojo
           );
       properties.remove(alternativeKey);
     }
-    return properties.getProperty(alternativeKey);
   }
 
   private String configure(Properties properties, String value, String key)
@@ -936,7 +991,7 @@ public abstract class AbstractSchemaMojo extends AbstractMojo
     return properties.getProperty(key);
   }
 
-  private boolean configure(Properties properties, Boolean value, String key)
+  private void configure(Properties properties, Boolean value, String key)
   {
     if (value != null)
     {
@@ -949,7 +1004,62 @@ public abstract class AbstractSchemaMojo extends AbstractMojo
         getLog().debug("Using the value " + value + " for property " + key);
       properties.setProperty(key, value.toString());
     }
-    return Boolean.valueOf(properties.getProperty(key));
+  }
+
+  private File getOutputFile(String filename)
+      throws
+        MojoExecutionException
+  {
+    File output = new File(filename);
+
+    if (!output.isAbsolute())
+    {
+      // Interpret relative file path relative to build directory
+      output = new File(buildDirectory, filename);
+    }
+    getLog().debug("Output file: " + output.getPath());
+
+    // Ensure that directory path for specified file exists
+    File outFileParentDir = output.getParentFile();
+    if (null != outFileParentDir && !outFileParentDir.exists())
+    {
+      try
+      {
+        getLog().info(
+            "Creating directory path for output file:" +
+            outFileParentDir.getPath()
+            );
+        outFileParentDir.mkdirs();
+      }
+      catch (Exception e)
+      {
+        String error =
+            "Error creating directory path for output file: " + e.getMessage();
+        getLog().error(error);
+        throw new MojoExecutionException(error);
+      }
+    }
+
+    try
+    {
+      output.createNewFile();
+    }
+    catch (IOException e)
+    {
+      String error = "Error creating output file: " + e.getMessage();
+      getLog().error(error);
+      throw new MojoExecutionException(error);
+    }
+
+    if (!output.canWrite())
+    {
+      String error =
+          "Output file " + output.getAbsolutePath() + " is not writable!";
+      getLog().error(error);
+      throw new MojoExecutionException(error);
+    }
+
+    return output;
   }
 
   private void addMappings(MetadataSources sources, ModificationTracker tracker)
@@ -1172,48 +1282,41 @@ public abstract class AbstractSchemaMojo extends AbstractMojo
             PersistenceUnitTransactionType.RESOURCE_LOCAL
              );
 
-    List<ParsedPersistenceXmlDescriptor> units = parser.doResolve(properties);
+    Map<String, ParsedPersistenceXmlDescriptor> units =
+        parser.doResolve(properties);
 
     if (persistenceUnit == null)
     {
-      switch (units.size())
+      Iterator<String> names = units.keySet().iterator();
+      if (!names.hasNext())
       {
-        case 0:
-          getLog().info("Found no META-INF/persistence.xml.");
-          return null;
-        case 1:
-          getLog().info("Using persistence-unit " + units.get(0).getName());
-          return units.get(0);
-        default:
-          StringBuilder builder = new StringBuilder();
-          builder.append("No name provided and multiple persistence units found: ");
-          Iterator<ParsedPersistenceXmlDescriptor> it = units.iterator();
-          builder.append(it.next().getName());
-          while (it.hasNext())
-          {
-            builder.append(", ");
-            builder.append(it.next().getName());
-          }
-          builder.append('.');
-          throw new MojoFailureException(builder.toString());
+        getLog().info("Found no META-INF/persistence.xml.");
+        return null;
       }
-    }
 
-    for (ParsedPersistenceXmlDescriptor unit : units)
-    {
-      getLog().debug("Found persistence-unit " + unit.getName());
-      if (!unit.getName().equals(persistenceUnit))
-        continue;
+      String name = names.next();
+      if (!names.hasNext())
+      {
+          getLog().info("Using persistence-unit " + name);
+          return units.get(name);
+      }
 
-      // See if we (Hibernate) are the persistence provider
-      if (!ProviderChecker.isProvider(unit, properties))
+      StringBuilder builder = new StringBuilder();
+      builder.append("No name provided and multiple persistence units found: ");
+      builder.append(name);
+      while(names.hasNext())
       {
-        getLog().debug("Wrong provider: " + unit.getProviderClassName());
-        continue;
+        builder.append(", ");
+        builder.append(names.next());
       }
+      builder.append('.');
+      throw new MojoFailureException(builder.toString());
+    }
 
-      getLog().info("Using persistence-unit " + unit.getName());
-      return unit;
+    if (units.containsKey(persistenceUnit))
+    {
+      getLog().info("Using configured persistence-unit " + persistenceUnit);
+      return units.get(persistenceUnit);
     }
 
     throw new MojoFailureException("Could not find persistence-unit " + persistenceUnit);
diff --git a/src/main/java/de/juplo/plugins/hibernate/CreateMojo.java b/src/main/java/de/juplo/plugins/hibernate/CreateMojo.java
index 5bafe14..23f97a8 100644
--- a/src/main/java/de/juplo/plugins/hibernate/CreateMojo.java
+++ b/src/main/java/de/juplo/plugins/hibernate/CreateMojo.java
@@ -16,11 +16,17 @@ package de.juplo.plugins.hibernate;
  * limitations under the License.
  */
 
-import java.io.File;
+import java.util.Map;
 import org.apache.maven.plugin.MojoExecutionException;
 import org.apache.maven.plugin.MojoFailureException;
 import org.hibernate.boot.spi.MetadataImplementor;
-import org.hibernate.tool.hbm2ddl.SchemaExport;
+import org.hibernate.service.ServiceRegistry;
+import org.hibernate.tool.schema.SourceType;
+import org.hibernate.tool.schema.spi.ExecutionOptions;
+import org.hibernate.tool.schema.spi.SchemaManagementTool;
+import org.hibernate.tool.schema.spi.ScriptSourceInput;
+import org.hibernate.tool.schema.spi.SourceDescriptor;
+import org.hibernate.tool.schema.spi.TargetDescriptor;
 
 
 /**
@@ -41,7 +47,7 @@ public class CreateMojo extends AbstractSchemaMojo
    * relative to the project build directory
    * (<code>project.build.directory</code>).
    *
-   * @parameter property="hibernate.schema.export.create" default-value="create.sql"
+   * @parameter property="hibernate.schema.create" default-value="create.sql"
    * @since 1.0
    */
   private String outputFile;
@@ -58,43 +64,35 @@ public class CreateMojo extends AbstractSchemaMojo
 
 
   @Override
-  void build(MetadataImplementor metadata)
+  void build(
+      MetadataImplementor metadata,
+      ExecutionOptions options,
+      TargetDescriptor target
+      )
       throws
         MojoExecutionException,
         MojoFailureException
   {
-    SchemaExport schemaExport = new SchemaExport(metadata, createNamespaces);
-    schemaExport.setDelimiter(delimiter);
-    schemaExport.setFormat(format);
+    ServiceRegistry service =
+        metadata.getMetadataBuildingOptions().getServiceRegistry();
+    SchemaManagementTool tool = service.getService(SchemaManagementTool.class);
 
-    File output = new File(outputFile);
-
-    if (!output.isAbsolute())
-    {
-      // Interpret relative file path relative to build directory
-      output = new File(buildDirectory, outputFile);
-      getLog().debug("Adjusted relative path, resulting path is " + output.getPath());
-    }
-
-    // Ensure that directory path for specified file exists
-    File outFileParentDir = output.getParentFile();
-    if (null != outFileParentDir && !outFileParentDir.exists())
+    Map config = options.getConfigurationValues();
+    SourceDescriptor source = new SourceDescriptor()
     {
-      try
+         @Override
+      public SourceType getSourceType()
       {
-        getLog().info("Creating directory path for output file:" + outFileParentDir.getPath());
-        outFileParentDir.mkdirs();
+        return SourceType.METADATA;
       }
-      catch (Exception e)
+
+      @Override
+      public ScriptSourceInput getScriptSourceInput()
       {
-        getLog().error("Error creating directory path for output file: " + e.getLocalizedMessage());
+        return null;
       }
-    }
-
-    schemaExport.setOutputFile(output.getPath());
-    schemaExport.execute(false, this.export, false, true);
+    };
 
-    for (Object exception : schemaExport.getExceptions())
-      getLog().error(exception.toString());
+    tool.getSchemaCreator(config).doCreation(metadata, options, source, target);
   }
 }
diff --git a/src/main/java/de/juplo/plugins/hibernate/DropMojo.java b/src/main/java/de/juplo/plugins/hibernate/DropMojo.java
index 969a219..dd40387 100644
--- a/src/main/java/de/juplo/plugins/hibernate/DropMojo.java
+++ b/src/main/java/de/juplo/plugins/hibernate/DropMojo.java
@@ -16,11 +16,17 @@ package de.juplo.plugins.hibernate;
  * limitations under the License.
  */
 
-import java.io.File;
+import java.util.Map;
 import org.apache.maven.plugin.MojoExecutionException;
 import org.apache.maven.plugin.MojoFailureException;
 import org.hibernate.boot.spi.MetadataImplementor;
-import org.hibernate.tool.hbm2ddl.SchemaExport;
+import org.hibernate.service.ServiceRegistry;
+import org.hibernate.tool.schema.SourceType;
+import org.hibernate.tool.schema.spi.ExecutionOptions;
+import org.hibernate.tool.schema.spi.SchemaManagementTool;
+import org.hibernate.tool.schema.spi.ScriptSourceInput;
+import org.hibernate.tool.schema.spi.SourceDescriptor;
+import org.hibernate.tool.schema.spi.TargetDescriptor;
 
 
 /**
@@ -41,7 +47,7 @@ public class DropMojo extends AbstractSchemaMojo
    * relative to the project build directory
    * (<code>project.build.directory</code>).
    *
-   * @parameter property="hibernate.schema.export.drop" default-value="drop.sql"
+   * @parameter property="hibernate.schema.drop" default-value="drop.sql"
    * @since 1.0
    */
   private String outputFile;
@@ -58,43 +64,35 @@ public class DropMojo extends AbstractSchemaMojo
 
 
   @Override
-  void build(MetadataImplementor metadata)
+  void build(
+      MetadataImplementor metadata,
+      ExecutionOptions options,
+      TargetDescriptor target
+      )
       throws
         MojoExecutionException,
         MojoFailureException
   {
-    SchemaExport schemaExport = new SchemaExport(metadata, createNamespaces);
-    schemaExport.setDelimiter(delimiter);
-    schemaExport.setFormat(format);
+    ServiceRegistry service =
+        metadata.getMetadataBuildingOptions().getServiceRegistry();
+    SchemaManagementTool tool = service.getService(SchemaManagementTool.class);
 
-    File output = new File(outputFile);
-
-    if (!output.isAbsolute())
-    {
-      // Interpret relative file path relative to build directory
-      output = new File(buildDirectory, outputFile);
-      getLog().debug("Adjusted relative path, resulting path is " + output.getPath());
-    }
-
-    // Ensure that directory path for specified file exists
-    File outFileParentDir = output.getParentFile();
-    if (null != outFileParentDir && !outFileParentDir.exists())
+    Map config = options.getConfigurationValues();
+    SourceDescriptor source = new SourceDescriptor()
     {
-      try
+         @Override
+      public SourceType getSourceType()
       {
-        getLog().info("Creating directory path for output file:" + outFileParentDir.getPath());
-        outFileParentDir.mkdirs();
+        return SourceType.METADATA;
       }
-      catch (Exception e)
+
+      @Override
+      public ScriptSourceInput getScriptSourceInput()
       {
-        getLog().error("Error creating directory path for output file: " + e.getLocalizedMessage());
+        return null;
       }
-    }
-
-    schemaExport.setOutputFile(output.getPath());
-    schemaExport.execute(false, this.export, true, false);
+    };
 
-    for (Object exception : schemaExport.getExceptions())
-      getLog().error(exception.toString());
+    tool.getSchemaDropper(config).doDrop(metadata, options, source, target);
   }
 }