Not able to Update WellboreFeature/SeismicLatticeFeature using fesapi 1.2.3.0 (Java Wrapper)

Sample programs to reproduce the issue are below.

import com.f2i.energisticsStandardsApi.StringVector;
import com.f2i.energisticsStandardsApi.common.AbstractHdfProxy;
import com.f2i.energisticsStandardsApi.common.DataObjectRepository;
import com.f2i.energisticsStandardsApi.common.EpcDocument;
import com.f2i.energisticsStandardsApi.resqml2_0_1.WellboreFeature;

import java.io.File;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

public class FesapiWellboreFeatureIssue {

/**
 * Loading the FesapiCpp native library
 */
static {
	try {
		System.loadLibrary("FesapiCppd.1.2.3.0");
	}
	catch (UnsatisfiedLinkError e) {
		System.out.println("UnsatisfiedLinkError : " + e.toString());
	}
}

protected EpcDocument           epcDoc   = null;
protected DataObjectRepository  repo     = null;
protected AbstractHdfProxy      hdfProxy = null;
protected Logger logger   = Logger.getLogger(FesapiWellboreFeatureIssue.class.getName());
protected Map<String, Object> lockMap  = null;
protected String                filePath = "";

private void openResqmlFile(String filePath) {
	try {
		File file = new File(filePath);
		if(file.exists()){
			this.epcDoc = new EpcDocument(filePath);
			this.repo = new DataObjectRepository();
			this.filePath= filePath;
			String status = epcDoc.deserializeInto(repo, DataObjectRepository.openingMode.READ_WRITE);
			if (!status.isEmpty()) {
				logger.log(Level.INFO, "Status : " + status);
			}
		}else{
			this.epcDoc = new EpcDocument(filePath);
			this.repo = new DataObjectRepository();
			this.filePath= filePath;

		}


	}catch(Exception ex) {
		throw ex;
	}
}
protected AbstractHdfProxy openHDF5Proxy() throws Exception{
	if(repo==null) {
		throw new Exception("EPC File is not opened or created");
	}
	long hdfProxyCounter = repo.getHdfProxyCount();
	if(hdfProxyCounter <= 0) {
		hdfProxy = repo.createHdfProxy("", "External HDF5 proxy", epcDoc.getStorageDirectory(),
				epcDoc.getName() + ".h5", DataObjectRepository.openingMode.READ_WRITE);
	}else if(repo.getDefaultHdfProxy()!=null){
		hdfProxy = repo.getDefaultHdfProxy();
	}else {
		hdfProxy = repo.getHdfProxy(0);
	}
	return hdfProxy ;

}
protected void createResqmlFile(String filePath) {
	try {
		this.epcDoc = new EpcDocument(filePath);
		this.repo = new DataObjectRepository();
	}catch(Exception ex) {
		logger.log(Level.SEVERE, " Exception while creating the Resqml File ",ex);
		throw ex;
	}
}

protected void closeResqmlFile() {

	if (repo != null){
		repo.delete();
	}
	if (epcDoc != null){
		epcDoc.delete();
	}

}
protected void openOrCreateEPCFile(String resqmlFilePath) {
	File f = new File(resqmlFilePath);
	if(f.exists() && !f.isDirectory()) {
		openResqmlFile(resqmlFilePath);
	}
	else {
		createResqmlFile(resqmlFilePath);
	}
}

public String createWellboreFeature() {
	String uuid="";
	try {

		String  title				="WellboreFeature ABC";
		WellboreFeature witsmlWellboreFeature = repo.createWellboreFeature(uuid, title);

		epcDoc.serializeFrom(repo);
		uuid=witsmlWellboreFeature.getUuid();

	}
	catch(Exception e) {
		logger.log(Level.SEVERE, "************ WellboreFeature with "+uuid+" writing failed to the File : "+filePath);

		e.printStackTrace();
	}
	logger.log(Level.INFO, "************ WellboreFeature  with "+uuid+" is written to the File : "+filePath);

	return uuid;
}

public String updateWellboreFeature(String uuid) {

	try {

		String  title				="WellboreFeature ABC";
		WellboreFeature witsmlWellboreFeature = repo.createWellboreFeature(uuid, title);

		epcDoc.serializeFrom(repo);
		uuid=witsmlWellboreFeature.getUuid();

	}
	catch(Exception e) {
		logger.log(Level.SEVERE, "************ WellboreFeature with "+uuid+" writing failed to the File : "+filePath);

		e.printStackTrace();
	}
	logger.log(Level.INFO, "************ WellboreFeature  with "+uuid+" is written to the File : "+filePath);

	return uuid;
}
public static void main(String[] args) {
	String filePath = System.currentTimeMillis()+".epc";
	FesapiWellboreFeatureIssue fesapiWellboreFeatureIssue= new FesapiWellboreFeatureIssue();
	fesapiWellboreFeatureIssue.openResqmlFile(filePath);
	fesapiWellboreFeatureIssue.createWellboreFeature();
	fesapiWellboreFeatureIssue.closeResqmlFile();

	fesapiWellboreFeatureIssue.openOrCreateEPCFile(filePath);

	StringVector allUuids = fesapiWellboreFeatureIssue.repo.getUuids();
	for (String uuid : allUuids){
		String className= fesapiWellboreFeatureIssue.repo.getDataObjectByUuid(uuid).getClass().toString();
		if(className.equals("class com.f2i.energisticsStandardsApi.resqml2_0_1.WellboreFeature")){

			fesapiWellboreFeatureIssue.updateWellboreFeature(uuid);
			fesapiWellboreFeatureIssue.closeResqmlFile();
		}
	}


		System.out.println("Programme exist");
}

}

=============================================================================
import com.f2i.energisticsStandardsApi.*;
import com.f2i.energisticsStandardsApi.common.AbstractHdfProxy;
import com.f2i.energisticsStandardsApi.common.DataObjectRepository;
import com.f2i.energisticsStandardsApi.common.EpcDocument;

import com.f2i.energisticsStandardsApi.resqml2_0_1.*;

import java.io.File;

import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

public class FesapiSeismicLatticeIssue {

/**
 * Loading the FesapiCpp native library
 */
static {
	try {
		System.loadLibrary("FesapiCppd.1.2.3.0");
	}
	catch (UnsatisfiedLinkError e) {
		System.out.println("UnsatisfiedLinkError : " + e.toString());
	}
}

protected EpcDocument           epcDoc   = null;
protected DataObjectRepository  repo     = null;
protected AbstractHdfProxy      hdfProxy = null;
protected Logger logger   = Logger.getLogger(FesapiSeismicLatticeIssue.class.getName());
protected Map<String, Object> lockMap  = null;
protected String                filePath = "";

private void openResqmlFile(String filePath) {
	try {
		File file = new File(filePath);
		if(file.exists()){
			this.epcDoc = new EpcDocument(filePath);
			this.repo = new DataObjectRepository();
			this.filePath= filePath;
			String status = epcDoc.deserializeInto(repo, DataObjectRepository.openingMode.READ_WRITE);
			if (!status.isEmpty()) {
				logger.log(Level.INFO, "Status : " + status);
			}
		}else{
			this.epcDoc = new EpcDocument(filePath);
			this.repo = new DataObjectRepository();
			this.filePath= filePath;

		}


	}catch(Exception ex) {
		throw ex;
	}
}
protected AbstractHdfProxy openHDF5Proxy() throws Exception{
	if(repo==null) {
		throw new Exception("EPC File is not opened or created");
	}
	long hdfProxyCounter = repo.getHdfProxyCount();
	if(hdfProxyCounter <= 0) {
		hdfProxy = repo.createHdfProxy("", "External HDF5 proxy", epcDoc.getStorageDirectory(),
				epcDoc.getName() + ".h5", DataObjectRepository.openingMode.READ_WRITE);
	}else if(repo.getDefaultHdfProxy()!=null){
		hdfProxy = repo.getDefaultHdfProxy();
	}else {
		hdfProxy = repo.getHdfProxy(0);
	}
	return hdfProxy ;

}
protected void createResqmlFile(String filePath) {
	try {
		this.epcDoc = new EpcDocument(filePath);
		this.repo = new DataObjectRepository();
	}catch(Exception ex) {
		logger.log(Level.SEVERE, " Exception while creating the Resqml File ",ex);
		throw ex;
	}
}

protected void closeResqmlFile() {

	if (repo != null){
		repo.delete();
	}
	if (epcDoc != null){
		epcDoc.delete();
	}

}
protected void openOrCreateEPCFile(String resqmlFilePath) {
	File f = new File(resqmlFilePath);
	if(f.exists() && !f.isDirectory()) {
		openResqmlFile(resqmlFilePath);
	}
	else {
		createResqmlFile(resqmlFilePath);
	}
}

public String createSeismicLattice() {
	String uuid="";
	try {

		String  title				="Seismic Lattice ABC";
		int 	inlineIncrement		= 1;
		int 	crosslineIncrement  = 1;
		long    originInline		= 1;
		long    originCrossline		= 1;
		long    inlineCount			= 100;
		long    crosslineCount		= 100;

		SeismicLatticeFeature seismicLatticeFeature = repo.createSeismicLattice(uuid,title,inlineIncrement,crosslineIncrement,
				originInline, originCrossline,inlineCount,crosslineCount);

		epcDoc.serializeFrom(repo);
		uuid=seismicLatticeFeature.getUuid();

	}
	catch(Exception e) {
		logger.log(Level.SEVERE, "************ SeismicLatticeFeature with "+uuid+" writing failed to the File : "+filePath);

		e.printStackTrace();
	}
	logger.log(Level.INFO, "************ SeismicLatticeFeature  with "+uuid+" is written to the File : "+filePath);

	return uuid;
}

public String updateSeismicLattice(String uuid) {

	try {

		String  title				="Seismic Lattice ABC";
		int 	inlineIncrement		= 1;
		int 	crosslineIncrement  = 1;
		long    originInline		= 1;
		long    originCrossline		= 1;
		long    inlineCount			= 100;
		long    crosslineCount		= 100;

		SeismicLatticeFeature seismicLatticeFeature = repo.createSeismicLattice(uuid,title,inlineIncrement,crosslineIncrement,
				originInline, originCrossline,inlineCount,crosslineCount);
		epcDoc.serializeFrom(repo);
		uuid=seismicLatticeFeature.getUuid();

	}
	catch(Exception e) {
		logger.log(Level.SEVERE, "************ SeismicLatticeFeature with "+uuid+" writing failed to the File : "+filePath);

		e.printStackTrace();
	}
	logger.log(Level.INFO, "************ SeismicLatticeFeature  with "+uuid+" is written to the File : "+filePath);

	return uuid;
}
public static void main(String[] args) {
	String filePath = System.currentTimeMillis()+".epc";
	FesapiSeismicLatticeIssue fesapiSeismicLatticeIssue= new FesapiSeismicLatticeIssue();
	fesapiSeismicLatticeIssue.openResqmlFile(filePath);
	fesapiSeismicLatticeIssue.createSeismicLattice();
	fesapiSeismicLatticeIssue.closeResqmlFile();

	fesapiSeismicLatticeIssue.openOrCreateEPCFile(filePath);

	StringVector allUuids = fesapiSeismicLatticeIssue.repo.getUuids();
	for (String uuid : allUuids){
		String className= fesapiSeismicLatticeIssue.repo.getDataObjectByUuid(uuid).getClass().toString();
		if(className.equals("class com.f2i.energisticsStandardsApi.resqml2_0_1.SeismicLatticeFeature")){

			fesapiSeismicLatticeIssue.updateSeismicLattice(uuid);
			fesapiSeismicLatticeIssue.closeResqmlFile();
		}
	}


		System.out.println("Programme exist");
}

}

Hi @Husnain,

I am not sure I perfectly understand your code, but it looks like you do not update anything; you actually create a duplicate, which is not allowed.
Indeed, in your method updateWellboreFeature(String uuid), you create another WellboreFeature with the same uuid in the repository (see the line WellboreFeature witsmlWellboreFeature = repo.createWellboreFeature(uuid, title)).
Having two dataobjects with the same uuid in the same repository is not allowed. This could be the reason for the exception you are facing.

Regards

As side notes:

  • Please do not use the delete() method in closeResqmlFile(); use clear() for repo and close() for epcDoc instead (a minimal sketch follows these notes). delete() is mainly there for SWIG internal memory management.
  • It is not recommended at all to serialize a repository into a file which is currently opened for deserialization. The HdfProxy would probably conflict at some point. Please serialize your repo into a different file and delete the previously opened file if necessary. To FESAPI, EPC documents are considered transient and should not be used as persistent databases.
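
A minimal sketch of this suggested cleanup, reusing the fields from your sample:

	protected void closeResqmlFile() {
		// clear() empties the repository; close() releases the EPC document.
		// delete() is reserved for SWIG internal memory management.
		if (repo != null) {
			repo.clear();
		}
		if (epcDoc != null) {
			epcDoc.close();
		}
	}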

In

updateWellboreFeature(String uuid),

instead of creating a duplicate using

WellboreFeature witsmlWellboreFeature = repo.createWellboreFeature(uuid, title);

you probably want to retrieve the existing wellboreFeature instead, using

WellboreFeature witsmlWellboreFeature = (WellboreFeature) repo.getDataObjectByUuid(uuid);

and then update, for example, the title:

witsmlWellboreFeature.setTitle("Another title to update");

Finally, you can serialize your repository with this modified title into a (different) file.

Example of the guessed change:

public String updateWellboreFeature(String uuid) {
	try {
		WellboreFeature witsmlWellboreFeature = (WellboreFeature) repo.getDataObjectByUuid(uuid);
		witsmlWellboreFeature.setTitle("Another title to update");

		epcDoc.serializeFrom(repo);
		uuid = witsmlWellboreFeature.getUuid();
	}
	catch (Exception e) {
		logger.log(Level.SEVERE, "************ WellboreFeature with " + uuid + " writing failed to the File : " + filePath);
		e.printStackTrace();
	}
	logger.log(Level.INFO, "************ WellboreFeature with " + uuid + " is written to the File : " + filePath);

	return uuid;
}

Looking at my side notes, notice that even if this should work, it is risky because:

  • you write into the same file you are opening (see the sketch below);
  • you use delete() in closeResqmlFile().
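
For the first point, a minimal sketch of writing the updated repository into a different EPC file (the target file name is only illustrative):

	// Serialize the modified repository into a new EPC document rather than
	// the one it was deserialized from, to avoid HdfProxy conflicts.
	EpcDocument targetEpcDoc = new EpcDocument("updated.epc");
	targetEpcDoc.serializeFrom(repo);
	targetEpcDoc.close();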

Regarding delete(), I remember we had to do this to avoid crashes that were happening on an in-progress version of 1.2+ that we picked up mid-2020. It seems these issues are addressed in the released version 1.2.3.0. We will remove delete() in favor of clear() for repo and close() for epcDoc.

@philippeVerney,
We have re-written the code as follows:

	String fileName = System.currentTimeMillis() + "";
	String filePath = fileName + ".epc";
	
	EpcDocument          srcEpcDoc = new EpcDocument(filePath);
	DataObjectRepository repo      = new DataObjectRepository();

	repo.createHdfProxy("", "External HDF5 proxy", srcEpcDoc.getStorageDirectory(),
			                                srcEpcDoc.getName() + ".h5", DataObjectRepository.openingMode.READ_WRITE);

	
	// Create a new WellboreFeature
	String  title				    = "WellboreFeature ABC";
	WellboreFeature wellboreFeature = repo.createWellboreFeature("", title);
	
	String wellborefeatureUuid = wellboreFeature.getUuid();
	srcEpcDoc.serializeFrom(repo);
	
	srcEpcDoc.close();
	repo.clear();
	
	
	// Deserialize from existing .epc file
	srcEpcDoc = new EpcDocument(filePath);
	repo = new DataObjectRepository();
	srcEpcDoc.deserializeInto(repo, DataObjectRepository.openingMode.OVERWRITE);
	
	title				          = "WellboreFeature ABC - Updated";
	// wellboreFeature = repo.createWellboreFeature(wellborefeatureUuid, title); 
	wellboreFeature = (WellboreFeature)repo.getDataObjectByUuid(wellborefeatureUuid);
	wellboreFeature.setTitle(title);

	// Serialize to file
	srcEpcDoc.serializeFrom(repo);
	
	srcEpcDoc.close();
	repo.clear();

This is similar to what you recommended - we are good with this.

Philippe: The HdfProxy would probably conflict at some point. Please serialize your repo into a different file and delete the previously opened file if necessary. To FESAPI, EPC documents are considered transient and should not be used as persistent databases.

We wanted to ask: why is it risky to write back to the same .epc file?

We are deserializing in OVERWRITE mode precisely for this purpose, aren't we?
srcEpcDoc.deserializeInto(repo, DataObjectRepository.openingMode.OVERWRITE);

We are effectively treating the RESQML file as a persistent data source in our workflows:

  • Write Wellbore to a file
  • Add Trajectories to the same file at a later point in time
  • Add more data types to the same file at a later point in time if needed, etc.

If we can’t serialize it back to the same file, the above workflows become very difficult.

My understanding is that RESQML is supposed to support updating existing data and adding new data to an existing file, and this is working from what we have tested so far.

Please advise.

Thanks,
Shakir

Basically because it is not heavily tested with FESAPI. Some users do it and FESAPI has been fixed for that, but the original intent of FESAPI was just to import and export (it has evolved and is still evolving), not to manage data as you might expect from a database engine.

You are right and I was wrong: DataObjectRepository.openingMode.OVERWRITE is clearly one of the fixes we made to provide some basic functionality for persisted files.

Some clear risks if you persist files:

  • Deletion of data is not supported at all with FESAPI.
  • Modifying a relationship between two dataobjects is rudimentary; don't expect too much. On the other hand, modifying a dataobject attribute (except the uuid), such as a title, should be OK.
  • Concurrency is not supported. Don't write with one process and read with another, and don't write with two different processes unless you really know what you are doing.

The workflow you describe looks OK with FESAPI, even if it is not heavily tested. Adding dataobjects (even relationships between those dataobjects) should be OK, as in the sketch below.
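
A minimal sketch of this append workflow (the file name and the choice of added data type are only illustrative; it reuses only calls already shown in this thread):

	// Re-open an existing EPC file and append a new data object to it.
	EpcDocument epcDoc = new EpcDocument("existingData.epc");
	DataObjectRepository repo = new DataObjectRepository();
	epcDoc.deserializeInto(repo, DataObjectRepository.openingMode.OVERWRITE);

	// An empty uuid lets FESAPI generate a new one for the added data object.
	repo.createSeismicLattice("", "Seismic Lattice XYZ", 1, 1, 1, 1, 100, 100);

	// Write existing and new data objects back, then release resources.
	epcDoc.serializeFrom(repo);
	epcDoc.close();
	repo.clear();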

My understanding is quite different. RESQML has only been designed to transfer a dataobject from A to B and to keep traceability (of updates, for example) on dataobjects.
So, RESQML is not responsible for how the data are stored and/or updated, only for how they are transferred.
The EPC can be chosen as the store; we know some companies doing that (we even do it in our own ETP server example, and we see big limits compared to a database, see the risks above). On the other hand, the EPC can be used only for transfer while the store is a real database (more OSDU compliant, by the way); we know some other companies doing that. RESQML just states that EPC must be used for file transfer; the rest is up to the adopters.

Thanks for your insights, @philippeVerney !