-
Notifications
You must be signed in to change notification settings - Fork 6
Open
Description
I am trying to run a small demo that would perform a prediction on a dummy image. But it does not seem to be working and I am not really sure why.
This is the code that I am using:
package net.stefanhahmann.cellpose.jdll;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutionException;
import net.imglib2.RandomAccessibleInterval;
import net.imglib2.img.Img;
import net.imglib2.img.array.ArrayImgs;
import net.imglib2.type.NativeType;
import net.imglib2.type.numeric.RealType;
import net.imglib2.type.numeric.real.FloatType;
import org.apache.commons.compress.archivers.ArchiveException;
import io.bioimage.modelrunner.apposed.appose.MambaInstallException;
import io.bioimage.modelrunner.exceptions.LoadModelException;
import io.bioimage.modelrunner.exceptions.RunModelException;
import io.bioimage.modelrunner.model.special.cellpose.Cellpose;
// Minimal reproduction: runs a Cellpose prediction on a 30x30x30 all-zero dummy image via JDLL.
// NOTE(review): per the report, "Success" is never printed — inference() appears to block
// indefinitely after the Python worker logs the LAUNCH response.
public class CellPose3DDummyPrediction
{
// NOTE(review): the type parameter T is declared but never used anywhere in the method body.
public static < T extends RealType< T > & NativeType< T > > void main( String[] args )
throws MambaInstallException, IOException, URISyntaxException, ArchiveException, InterruptedException, ExecutionException,
LoadModelException, RunModelException
{
// Installs the Python environment / cellpose dependencies if not already present.
Cellpose.installRequirements();
// try-with-resources so the underlying Python worker process is closed even on failure.
// ("fromPretained" is the actual JDLL API method name, typo included.)
try (Cellpose cellpose = Cellpose.fromPretained( "cyto2", true ))
{
cellpose.loadModel();
// All-zero 3D float image used purely as smoke-test input — presumably interpreted
// as a 3D volume by JDLL; axis semantics not visible here, TODO confirm.
final Img< FloatType > dummyImg = ArrayImgs.floats( 30, 30, 30 );
List< RandomAccessibleInterval< FloatType > > rais = new ArrayList<>();
rais.add( dummyImg );
// Reported defect: this call never returns, so the next line is never reached.
cellpose.inference( rais );
System.out.println( "Success" );
}
}
}
I get the following output:
[SERVICE-1] {"task":"b108a2f9-ca71-45e4-921a-9a7f75f67e21","requestType":"EXECUTE","inputs":{},"script":"if \u0027denoise\u0027 not in globals().keys():\r\n from cellpose import denoise\r\n globals()[\u0027denoise\u0027] \u003d denoise\r\nif \u0027np\u0027 not in globals().keys():\r\n import numpy as np\r\n globals()[\u0027np\u0027] \u003d np\r\nif \u0027os\u0027 not in globals().keys():\r\n import os\r\n globals()[\u0027os\u0027] \u003d os\r\nif \u0027shared_memory\u0027 not in globals().keys():\r\n from multiprocessing import shared_memory\r\n globals()[\u0027shared_memory\u0027] \u003d shared_memory\r\nmodel_832b9b9f_7340_423d_8d6c_da4cfab0a107 \u003d denoise.CellposeDenoiseModel(gpu\u003dFalse, pretrained_model\u003dr\u0027C:\\Users\\stha735e\\git\\stardist3d-java\\models\\cyto2_19032025_173423\\cyto2torch_0\u0027)\r\nglobals()[\u0027model_832b9b9f_7340_423d_8d6c_da4cfab0a107\u0027] \u003d model_832b9b9f_7340_423d_8d6c_da4cfab0a107\r\ndef handle_output_list(out_list):\r\n for outs_i in out_list:\r\n if type(outs_i) \u003d\u003d np.ndarray:\r\n shm \u003d shared_memory.SharedMemory(create\u003dTrue, size\u003douts_i.nbytes)\r\n sh_np_array \u003d np.ndarray(outs_i.shape, dtype\u003douts_i.dtype, buffer\u003dshm.buf)\r\n np.copyto(sh_np_array, outs_i)\r\n shms_34a15ecb_fa95_44b7_ae49_ef6d3922c58c.append(shm)\r\n shm_names_b8a50798_16f7_4ef8_ab8a_33a8672f69db.append(shm.name)\r\n dtypes_8a89b85b_0ada_42c4_945e_f708b4f67208.append(str(outs_i.dtype))\r\n dims_6a406d28_1c3f_4de2_bb2e_a33e694c3e1a.append(outs_i.shape)\r\n elif str(type(outs_i)) \u003d\u003d \"\u003cclass \u0027torch.Tensor\u0027\u003e\":\r\n if \u0027torch\u0027 not in globals().keys():\r\n import torch\r\n globals()[\u0027torch\u0027] \u003d torch\r\n shm \u003d shared_memory.SharedMemory(create\u003dTrue, size\u003douts_i.numel() * outs_i.element_size())\r\n np_arr \u003d np.ndarray(outs_i.shape, dtype\u003douts_i.dtype.name, buffer\u003dshm.buf)\r\n tensor_np_view \u003d 
torch.from_numpy(np_arr)\r\n tensor_np_view.copy_(outs_i)\r\n shms_34a15ecb_fa95_44b7_ae49_ef6d3922c58c.append(shm)\r\n shm_names_b8a50798_16f7_4ef8_ab8a_33a8672f69db.append(shm.name)\r\n dtypes_8a89b85b_0ada_42c4_945e_f708b4f67208.append(outs_i.dtype.name)\r\n dims_6a406d28_1c3f_4de2_bb2e_a33e694c3e1a.append(outs_i.shape)\r\n elif type(outs_i) \u003d\u003d int:\r\n shm \u003d shared_memory.SharedMemory(create\u003dTrue, size\u003d8)\r\n shm.buf[:8] \u003d outs_i.to_bytes(8, byteorder\u003d\u0027little\u0027, signed\u003dTrue)\r\n shms_34a15ecb_fa95_44b7_ae49_ef6d3922c58c.append(shm)\r\n shm_names_b8a50798_16f7_4ef8_ab8a_33a8672f69db.append(shm.name)\r\n dtypes_8a89b85b_0ada_42c4_945e_f708b4f67208.append(\u0027int64\u0027)\r\n dims_6a406d28_1c3f_4de2_bb2e_a33e694c3e1a.append((1))\r\n elif type(outs_i) \u003d\u003d float:\r\n shm \u003d shared_memory.SharedMemory(create\u003dTrue, size\u003d8)\r\n shm.buf[:8] \u003d outs_i.to_bytes(8, byteorder\u003d\u0027little\u0027, signed\u003dTrue)\r\n shms_34a15ecb_fa95_44b7_ae49_ef6d3922c58c.append(shm)\r\n shm_names_b8a50798_16f7_4ef8_ab8a_33a8672f69db.append(shm.name)\r\n dtypes_8a89b85b_0ada_42c4_945e_f708b4f67208.append(\u0027float64\u0027)\r\n dims_6a406d28_1c3f_4de2_bb2e_a33e694c3e1a.append((1))\r\n elif type(outs_i) \u003d\u003d tuple or type(outs_i) \u003d\u003d list:\r\n handle_output_list(outs_i)\r\n else:\r\n task.update(\u0027output type : \u0027 + str(type(outs_i)) + \u0027 not supported. Only supported output types are: np.ndarray, torch.tensor, int and float, or a list or tuple of any of those.\u0027)\r\n\r\n\r\nglobals()[\u0027handle_output_list\u0027] \u003d handle_output_list\r\n\r\n\r\n"}
[SERVICE-1] {"task": "b108a2f9-ca71-45e4-921a-9a7f75f67e21", "responseType": "LAUNCH"}
But I do not get the output "Success" from the last line of the main method. It somehow seems that the request gets stuck in the Python environment.
I am testing on Windows 10.
As a side note: in order to run the code without an actual error message I had to locally apply this PR to JDLL: #48
Metadata
Metadata
Assignees
Labels
No labels