Sample programs to reproduce the issue are below.
import com.f2i.energisticsStandardsApi.StringVector;
import com.f2i.energisticsStandardsApi.common.AbstractHdfProxy;
import com.f2i.energisticsStandardsApi.common.DataObjectRepository;
import com.f2i.energisticsStandardsApi.common.EpcDocument;
import com.f2i.energisticsStandardsApi.resqml2_0_1.WellboreFeature;
import java.io.File;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
public class FesapiWellboreFeatureIssue {

    /**
     * Loads the FesapiCpp native library when the class is initialized.
     * A load failure is printed rather than rethrown so the cause is visible.
     */
    static {
        try {
            System.loadLibrary("FesapiCppd.1.2.3.0");
        } catch (UnsatisfiedLinkError e) {
            System.out.println("UnsatisfiedLinkError : " + e.toString());
        }
    }

    // Native-backed EPC package; must be released with delete().
    protected EpcDocument epcDoc = null;
    // Native-backed data object repository; must be released with delete().
    protected DataObjectRepository repo = null;
    // HDF5 proxy resolved lazily by openHDF5Proxy().
    protected AbstractHdfProxy hdfProxy = null;
    protected Logger logger = Logger.getLogger(FesapiWellboreFeatureIssue.class.getName());
    // Declared but never used in this sample.
    protected Map<String, Object> lockMap = null;
    // Path of the EPC file currently opened or created; used in log messages.
    protected String filePath = "";

    /**
     * Opens an EPC file. An existing file is deserialized into the repository;
     * a non-existing file only gets a fresh document/repository pair.
     *
     * @param filePath path of the .epc file
     */
    private void openResqmlFile(String filePath) {
        // The two original branches differed only in the deserialization step,
        // and the catch block merely rethrew the exception: both were removed.
        this.epcDoc = new EpcDocument(filePath);
        this.repo = new DataObjectRepository();
        this.filePath = filePath;
        if (new File(filePath).exists()) {
            String status = epcDoc.deserializeInto(repo, DataObjectRepository.openingMode.READ_WRITE);
            if (!status.isEmpty()) {
                logger.log(Level.INFO, "Status : " + status);
            }
        }
    }

    /**
     * Resolves (or creates) the HDF5 proxy used for array storage.
     *
     * @return the resolved proxy
     * @throws Exception if no repository has been opened or created yet
     */
    protected AbstractHdfProxy openHDF5Proxy() throws Exception {
        if (repo == null) {
            throw new Exception("EPC File is not opened or created");
        }
        if (repo.getHdfProxyCount() <= 0) {
            // No proxy exists yet: create an external .h5 file next to the EPC file.
            hdfProxy = repo.createHdfProxy("", "External HDF5 proxy", epcDoc.getStorageDirectory(),
                    epcDoc.getName() + ".h5", DataObjectRepository.openingMode.READ_WRITE);
        } else if (repo.getDefaultHdfProxy() != null) {
            hdfProxy = repo.getDefaultHdfProxy();
        } else {
            hdfProxy = repo.getHdfProxy(0);
        }
        return hdfProxy;
    }

    /**
     * Creates a fresh document/repository pair for a new EPC file.
     *
     * @param filePath path of the .epc file to create
     */
    protected void createResqmlFile(String filePath) {
        try {
            this.epcDoc = new EpcDocument(filePath);
            this.repo = new DataObjectRepository();
            // BUG FIX: keep the tracked path consistent with openResqmlFile()
            // so log messages do not report a stale or empty file path.
            this.filePath = filePath;
        } catch (Exception ex) {
            logger.log(Level.SEVERE, " Exception while creating the Resqml File ", ex);
            throw ex;
        }
    }

    /**
     * Releases the native resources held by the repository and the document.
     * References are nulled so a later call (or accidental reuse) cannot touch
     * freed native memory or trigger a double delete.
     */
    protected void closeResqmlFile() {
        if (repo != null) {
            repo.delete();
            repo = null;
        }
        if (epcDoc != null) {
            epcDoc.delete();
            epcDoc = null;
        }
        // The proxy is owned by the repository that was just deleted.
        hdfProxy = null;
    }

    /**
     * Opens the EPC file when it already exists, otherwise creates it.
     *
     * @param resqmlFilePath path of the .epc file
     */
    protected void openOrCreateEPCFile(String resqmlFilePath) {
        File f = new File(resqmlFilePath);
        if (f.exists() && !f.isDirectory()) {
            openResqmlFile(resqmlFilePath);
        } else {
            createResqmlFile(resqmlFilePath);
        }
    }

    /**
     * Creates a WellboreFeature (the empty uuid lets fesapi generate one) and
     * serializes the repository into the EPC document.
     *
     * @return the uuid of the created feature, or "" on failure
     */
    public String createWellboreFeature() {
        String uuid = "";
        try {
            String title = "WellboreFeature ABC";
            WellboreFeature witsmlWellboreFeature = repo.createWellboreFeature(uuid, title);
            epcDoc.serializeFrom(repo);
            uuid = witsmlWellboreFeature.getUuid();
            // BUG FIX: log success only when serialization actually succeeded;
            // the original logged "is written" even after an exception.
            logger.log(Level.INFO, "************ WellboreFeature with " + uuid + " is written to the File : " + filePath);
        } catch (Exception e) {
            // Attach the exception to the log record instead of printStackTrace().
            logger.log(Level.SEVERE, "************ WellboreFeature with " + uuid + " writing failed to the File : " + filePath, e);
        }
        return uuid;
    }

    /**
     * Re-creates a WellboreFeature under an existing uuid (fesapi treats this
     * as an update of the data object) and serializes the repository.
     *
     * @param uuid uuid of the feature to update
     * @return the uuid reported by fesapi
     */
    public String updateWellboreFeature(String uuid) {
        try {
            String title = "WellboreFeature ABC";
            WellboreFeature witsmlWellboreFeature = repo.createWellboreFeature(uuid, title);
            epcDoc.serializeFrom(repo);
            uuid = witsmlWellboreFeature.getUuid();
            // BUG FIX: success is only logged on the success path (see above).
            logger.log(Level.INFO, "************ WellboreFeature with " + uuid + " is written to the File : " + filePath);
        } catch (Exception e) {
            logger.log(Level.SEVERE, "************ WellboreFeature with " + uuid + " writing failed to the File : " + filePath, e);
        }
        return uuid;
    }

    /**
     * Repro driver: create a feature, reopen the file, update the feature.
     */
    public static void main(String[] args) {
        String filePath = System.currentTimeMillis() + ".epc";
        FesapiWellboreFeatureIssue sample = new FesapiWellboreFeatureIssue();
        sample.openResqmlFile(filePath);
        sample.createWellboreFeature();
        sample.closeResqmlFile();
        sample.openOrCreateEPCFile(filePath);
        StringVector allUuids = sample.repo.getUuids();
        for (String uuid : allUuids) {
            // instanceof is more robust than comparing getClass().toString().
            if (sample.repo.getDataObjectByUuid(uuid) instanceof WellboreFeature) {
                sample.updateWellboreFeature(uuid);
            }
        }
        // BUG FIX: the original called closeResqmlFile() inside the loop and
        // then kept iterating over the deleted repository (use after free).
        sample.closeResqmlFile();
        System.out.println("Program exit");
    }
}
=============================================================================
import com.f2i.energisticsStandardsApi.*;
import com.f2i.energisticsStandardsApi.common.AbstractHdfProxy;
import com.f2i.energisticsStandardsApi.common.DataObjectRepository;
import com.f2i.energisticsStandardsApi.common.EpcDocument;
import com.f2i.energisticsStandardsApi.resqml2_0_1.*;
import java.io.File;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
public class FesapiSeismicLatticeIssue {
/**
* Loading the FesapiCpp native library
*/
// Load the FesapiCpp native library once, when the class is first used.
static {
try {
System.loadLibrary("FesapiCppd.1.2.3.0");
}
// A load failure is printed rather than rethrown so the cause is visible.
catch (UnsatisfiedLinkError e) {
System.out.println("UnsatisfiedLinkError : " + e.toString());
}
}
// Native-backed EPC package; released via delete() in closeResqmlFile().
protected EpcDocument epcDoc = null;
// Native-backed data object repository; released via delete().
protected DataObjectRepository repo = null;
// HDF5 proxy resolved lazily by openHDF5Proxy().
protected AbstractHdfProxy hdfProxy = null;
protected Logger logger = Logger.getLogger(FesapiSeismicLatticeIssue.class.getName());
// Declared but never used in this sample.
protected Map<String, Object> lockMap = null;
// Path of the EPC file currently opened; used in log messages.
protected String filePath = "";
/**
 * Opens an EPC file. An existing file is deserialized into the repository;
 * a non-existing file only gets a fresh document/repository pair.
 *
 * @param filePath path of the .epc file
 */
private void openResqmlFile(String filePath) {
    // The two original branches differed only in the deserialization step,
    // and the catch block merely rethrew the exception: both were removed.
    this.epcDoc = new EpcDocument(filePath);
    this.repo = new DataObjectRepository();
    this.filePath = filePath;
    if (new File(filePath).exists()) {
        String status = epcDoc.deserializeInto(repo, DataObjectRepository.openingMode.READ_WRITE);
        if (!status.isEmpty()) {
            logger.log(Level.INFO, "Status : " + status);
        }
    }
}
/**
 * Resolves (or creates) the HDF5 proxy used for array storage.
 *
 * @return the resolved proxy
 * @throws Exception if no repository has been opened or created yet
 */
protected AbstractHdfProxy openHDF5Proxy() throws Exception {
    if (repo == null) {
        throw new Exception("EPC File is not opened or created");
    }
    if (repo.getHdfProxyCount() <= 0) {
        // No proxy exists yet: create an external .h5 file next to the EPC file.
        hdfProxy = repo.createHdfProxy("", "External HDF5 proxy", epcDoc.getStorageDirectory(),
                epcDoc.getName() + ".h5", DataObjectRepository.openingMode.READ_WRITE);
        return hdfProxy;
    }
    // Prefer the repository's default proxy, falling back to the first one.
    AbstractHdfProxy defaultProxy = repo.getDefaultHdfProxy();
    hdfProxy = (defaultProxy != null) ? defaultProxy : repo.getHdfProxy(0);
    return hdfProxy;
}
/**
 * Creates a fresh document/repository pair for a new EPC file.
 *
 * @param filePath path of the .epc file to create
 */
protected void createResqmlFile(String filePath) {
    try {
        this.epcDoc = new EpcDocument(filePath);
        this.repo = new DataObjectRepository();
        // BUG FIX: keep the tracked path consistent with openResqmlFile()
        // so log messages do not report a stale or empty file path.
        this.filePath = filePath;
    } catch (Exception ex) {
        logger.log(Level.SEVERE, " Exception while creating the Resqml File ", ex);
        throw ex;
    }
}
/**
 * Releases the native resources held by the repository and the document.
 * References are nulled so a later call (or accidental reuse) cannot touch
 * freed native memory or trigger a double delete.
 */
protected void closeResqmlFile() {
    if (repo != null) {
        repo.delete();
        repo = null;
    }
    if (epcDoc != null) {
        epcDoc.delete();
        epcDoc = null;
    }
    // The proxy is owned by the repository that was just deleted.
    hdfProxy = null;
}
/**
 * Opens the EPC file when it already exists as a regular file,
 * otherwise creates it.
 *
 * @param resqmlFilePath path of the .epc file
 */
protected void openOrCreateEPCFile(String resqmlFilePath) {
    File target = new File(resqmlFilePath);
    boolean existingRegularFile = target.exists() && !target.isDirectory();
    if (existingRegularFile) {
        openResqmlFile(resqmlFilePath);
    } else {
        createResqmlFile(resqmlFilePath);
    }
}
/**
 * Creates a SeismicLatticeFeature with fixed demo geometry (the empty uuid
 * lets fesapi generate one) and serializes the repository into the document.
 *
 * @return the uuid of the created feature, or "" on failure
 */
public String createSeismicLattice() {
    String uuid = "";
    try {
        String title = "Seismic Lattice ABC";
        int inlineIncrement = 1;
        int crosslineIncrement = 1;
        long originInline = 1;
        long originCrossline = 1;
        long inlineCount = 100;
        long crosslineCount = 100;
        SeismicLatticeFeature seismicLatticeFeature = repo.createSeismicLattice(uuid, title, inlineIncrement, crosslineIncrement,
                originInline, originCrossline, inlineCount, crosslineCount);
        epcDoc.serializeFrom(repo);
        uuid = seismicLatticeFeature.getUuid();
        // BUG FIX: log success only when serialization actually succeeded;
        // the original logged "is written" even after an exception.
        logger.log(Level.INFO, "************ SeismicLatticeFeature with " + uuid + " is written to the File : " + filePath);
    } catch (Exception e) {
        // Attach the exception to the log record instead of printStackTrace().
        logger.log(Level.SEVERE, "************ SeismicLatticeFeature with " + uuid + " writing failed to the File : " + filePath, e);
    }
    return uuid;
}
/**
 * Re-creates a SeismicLatticeFeature under an existing uuid (fesapi treats
 * this as an update of the data object) and serializes the repository.
 *
 * @param uuid uuid of the feature to update
 * @return the uuid reported by fesapi
 */
public String updateSeismicLattice(String uuid) {
    try {
        String title = "Seismic Lattice ABC";
        int inlineIncrement = 1;
        int crosslineIncrement = 1;
        long originInline = 1;
        long originCrossline = 1;
        long inlineCount = 100;
        long crosslineCount = 100;
        SeismicLatticeFeature seismicLatticeFeature = repo.createSeismicLattice(uuid, title, inlineIncrement, crosslineIncrement,
                originInline, originCrossline, inlineCount, crosslineCount);
        epcDoc.serializeFrom(repo);
        uuid = seismicLatticeFeature.getUuid();
        // BUG FIX: log success only when serialization actually succeeded;
        // the original logged "is written" even after an exception.
        logger.log(Level.INFO, "************ SeismicLatticeFeature with " + uuid + " is written to the File : " + filePath);
    } catch (Exception e) {
        // Attach the exception to the log record instead of printStackTrace().
        logger.log(Level.SEVERE, "************ SeismicLatticeFeature with " + uuid + " writing failed to the File : " + filePath, e);
    }
    return uuid;
}
/**
 * Repro driver: create a seismic lattice, reopen the file, update it.
 */
public static void main(String[] args) {
    String filePath = System.currentTimeMillis() + ".epc";
    FesapiSeismicLatticeIssue sample = new FesapiSeismicLatticeIssue();
    sample.openResqmlFile(filePath);
    sample.createSeismicLattice();
    sample.closeResqmlFile();
    sample.openOrCreateEPCFile(filePath);
    StringVector allUuids = sample.repo.getUuids();
    for (String uuid : allUuids) {
        // instanceof is more robust than comparing getClass().toString().
        if (sample.repo.getDataObjectByUuid(uuid) instanceof SeismicLatticeFeature) {
            sample.updateSeismicLattice(uuid);
        }
    }
    // BUG FIX: the original called closeResqmlFile() inside the loop and
    // then kept iterating over the deleted repository (use after free).
    sample.closeResqmlFile();
    System.out.println("Program exit");
}