Merge remote-tracking branch 'origin/master' into exposeMaskMethodsForScatter_bugfix
NicoleJurjew committed Oct 3, 2024
2 parents f3fabf8 + a7d4e9d commit 6aacac7
Showing 5 changed files with 49 additions and 48 deletions.
5 changes: 5 additions & 0 deletions CHANGES.md
@@ -1,5 +1,10 @@
 # ChangeLog
 
+## v3.8.1
+
+* SIRF/STIR
+  - prior value returned as double
+
 ## v3.8.0
 
 * SIRF/STIR (PET and SPECT)
46 changes: 9 additions & 37 deletions src/xSTIR/cSTIR/cstir.cpp
@@ -1198,26 +1198,11 @@ void*
 cSTIR_objectiveFunctionGradient(void* ptr_f, void* ptr_i, int subset)
 {
     try {
-        ObjectiveFunction3DF& fun = objectFromHandle< ObjectiveFunction3DF>(ptr_f);
-        STIRImageData& id = objectFromHandle<STIRImageData>(ptr_i);
-        Image3DF& image = id.data();
-        STIRImageData* ptr_id = new STIRImageData(image);
-        shared_ptr<STIRImageData> sptr(ptr_id);
-        Image3DF& grad = sptr->data();
-        if (subset >= 0)
-            fun.compute_sub_gradient(grad, image, subset);
-        else {
-            int nsub = fun.get_num_subsets();
-            grad.fill(0.0);
-            STIRImageData* ptr_id = new STIRImageData(image);
-            shared_ptr<STIRImageData> sptr_sub(ptr_id);
-            Image3DF& subgrad = sptr_sub->data();
-            for (int sub = 0; sub < nsub; sub++) {
-                fun.compute_sub_gradient(subgrad, image, sub);
-                grad += subgrad;
-            }
-        }
-        return newObjectHandle(sptr);
+        auto& fun = objectFromHandle<xSTIR_ObjFun3DF>(ptr_f);
+        auto& id = objectFromHandle<STIRImageData>(ptr_i);
+        auto sptr_gd = std::make_shared<STIRImageData>(id);
+        fun.compute_gradient(id, subset, *sptr_gd);
+        return newObjectHandle(sptr_gd);
     }
     CATCH;
 }
@@ -1227,23 +1212,10 @@ void*
 cSTIR_computeObjectiveFunctionGradient(void* ptr_f, void* ptr_i, int subset, void* ptr_g)
 {
     try {
-        ObjectiveFunction3DF& fun = objectFromHandle< ObjectiveFunction3DF>(ptr_f);
+        xSTIR_ObjFun3DF& fun = objectFromHandle<xSTIR_ObjFun3DF>(ptr_f);
         STIRImageData& id = objectFromHandle<STIRImageData>(ptr_i);
         STIRImageData& gd = objectFromHandle<STIRImageData>(ptr_g);
-        Image3DF& image = id.data();
-        Image3DF& grad = gd.data();
-        if (subset >= 0)
-            fun.compute_sub_gradient(grad, image, subset);
-        else {
-            int nsub = fun.get_num_subsets();
-            grad.fill(0.0);
-            shared_ptr<STIRImageData> sptr_sub(new STIRImageData(image));
-            Image3DF& subgrad = sptr_sub->data();
-            for (int sub = 0; sub < nsub; sub++) {
-                fun.compute_sub_gradient(subgrad, image, sub);
-                grad += subgrad;
-            }
-        }
+        fun.compute_gradient(id, subset, gd);
         return (void*) new DataHandle;
     }
     CATCH;
@@ -1357,8 +1329,8 @@ cSTIR_priorValue(void* ptr_p, void* ptr_i)
             objectFromHandle<xSTIR_GeneralisedPrior3DF>(ptr_p);
         STIRImageData& id = objectFromHandle<STIRImageData>(ptr_i);
         Image3DF& image = id.data();
-        float v = (float)prior.compute_value(image);
-        return dataHandle<float>(v);
+        double v = prior.compute_value(image);
+        return dataHandle<double>(v);
     }
     CATCH;
 }
34 changes: 26 additions & 8 deletions src/xSTIR/cSTIR/include/sirf/STIR/stir_x.h
@@ -1119,11 +1119,33 @@ The actual algorithm is described in
     }
 };
 
-class xSTIR_GeneralisedObjectiveFunction3DF :
-    public stir::GeneralisedObjectiveFunction < Image3DF > {
+class xSTIR_GeneralisedObjectiveFunction3DF : public ObjectiveFunction3DF {
 public:
+    //! computes the gradient of an objective function
+    /*! if the subset number is non-negative, computes the gradient of
+        this objective function for that subset, otherwise computes
+        the sum of gradients for all subsets
+    */
+    void compute_gradient(const STIRImageData& id, int subset, STIRImageData& gd)
+    {
+        const Image3DF& image = id.data();
+        Image3DF& grad = gd.data();
+        if (subset >= 0)
+            compute_sub_gradient(grad, image, subset);
+        else {
+            int nsub = get_num_subsets();
+            grad.fill(0.0);
+            shared_ptr<STIRImageData> sptr_sub(new STIRImageData(image));
+            Image3DF& subgrad = sptr_sub->data();
+            for (int sub = 0; sub < nsub; sub++) {
+                compute_sub_gradient(subgrad, image, sub);
+                grad += subgrad;
+            }
+        }
+    }
+
     void multiply_with_Hessian(Image3DF& output, const Image3DF& curr_image_est,
-        const Image3DF& input, const int subset) const
+                               const Image3DF& input, const int subset) const
     {
         output.fill(0.0);
         if (subset >= 0)
@@ -1134,13 +1156,9 @@ The actual algorithm is described in
             }
         }
     }
-
-    // bool post_process() {
-    //     return post_processing();
-    // }
 };
 
-//typedef xSTIR_GeneralisedObjectiveFunction3DF ObjectiveFunction3DF;
+typedef xSTIR_GeneralisedObjectiveFunction3DF xSTIR_ObjFun3DF;
 
 class xSTIR_PoissonLogLikelihoodWithLinearModelForMeanAndProjData3DF :
     public stir::PoissonLogLikelihoodWithLinearModelForMeanAndProjData < Image3DF > {
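For orientation, this is the effect of the new compute_gradient helper as seen from the Python layer: a minimal sketch, assuming the usual pSTIR wrapper names (AcquisitionData, make_Poisson_loglikelihood, and an ObjectiveFunction.gradient method that forwards its subset argument to cSTIR_objectiveFunctionGradient); the input file name is a placeholder.

# Sketch only: 'my_sinogram.hs' is a placeholder, and gradient(image, subset) is
# assumed to be the pSTIR wrapper around cSTIR_objectiveFunctionGradient.
import sirf.STIR as pet

acq_data = pet.AcquisitionData('my_sinogram.hs')
image = acq_data.create_uniform_image(1.0)

obj_fun = pet.make_Poisson_loglikelihood(acq_data)
obj_fun.set_up(image)

grad_sub0 = obj_fun.gradient(image, 0)    # subset >= 0: gradient for that subset only
grad_full = obj_fun.gradient(image, -1)   # subset < 0: sum of the gradients over all subsets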
2 changes: 1 addition & 1 deletion src/xSTIR/pSTIR/STIR.py
@@ -2320,7 +2320,7 @@ def get_value(self, image):
         assert_validity(image, ImageData)
         handle = pystir.cSTIR_priorValue(self.handle, image.handle)
         check_status(handle)
-        v = pyiutil.floatDataFromHandle(handle)
+        v = pyiutil.doubleDataFromHandle(handle)
         pyiutil.deleteDataHandle(handle)
         return v
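The user-visible effect of the float-to-double change is at Prior.get_value, which now carries the value back at full double precision instead of truncating to float in the C layer. A minimal sketch, assuming the standard pSTIR prior calls (set_penalisation_factor, set_up) and a placeholder image file:

# Sketch only: 'my_image.hv' is a placeholder file name.
import sirf.STIR as pet

image = pet.ImageData('my_image.hv')
prior = pet.QuadraticPrior()
prior.set_penalisation_factor(0.5)
prior.set_up(image)

v = prior.get_value(image)   # Python float, now read via doubleDataFromHandle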
10 changes: 8 additions & 2 deletions src/xSTIR/pSTIR/tests/tests_qp_lc_rdp.py
@@ -18,7 +18,13 @@
 import sirf.config
 from sirf.Utilities import runner, RE_PYEXT, __license__, examples_data_path, pTest
 __version__ = "2.1.0"
-__author__ = "Imraj Singh, Evgueni Ovtchinnikov, Kris Thielemans"
+__author__ = "Imraj Singh, Evgueni Ovtchinnikov, Kris Thielemans, Edoardo Pasca"
+
+try:
+    subprocess.check_output('nvidia-smi')
+    has_nvidia = True
+except:
+    has_nvidia = False
 
 
 def Hessian_test(test, prior, x, eps=1e-3):
@@ -61,7 +67,7 @@ def test_main(rec=False, verb=False, throw=True):
     for penalisation_factor in [0,1,4]:
         for kappa in [True, False]:
             priors = [sirf.STIR.QuadraticPrior(), sirf.STIR.LogcoshPrior(), sirf.STIR.RelativeDifferencePrior()]
-            if sirf.config.STIR_WITH_CUDA:
+            if sirf.config.STIR_WITH_CUDA and has_nvidia:
                 priors.append(sirf.STIR.CudaRelativeDifferencePrior())
             for prior in priors:
                 if kappa:
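The extra runtime check matters because sirf.config.STIR_WITH_CUDA only records how STIR was built; it does not guarantee that an NVIDIA GPU and driver are present on the machine running the tests. A minimal sketch of the combined guard, mirroring the logic added above (gpu_available is a hypothetical helper name):

# Sketch only: build-time flag plus a runtime nvidia-smi probe.
import subprocess
import sirf.config
import sirf.STIR

def gpu_available():
    try:
        subprocess.check_output('nvidia-smi')   # fails if no usable GPU/driver
        return True
    except Exception:
        return False

priors = [sirf.STIR.QuadraticPrior(), sirf.STIR.LogcoshPrior(),
          sirf.STIR.RelativeDifferencePrior()]
if sirf.config.STIR_WITH_CUDA and gpu_available():
    priors.append(sirf.STIR.CudaRelativeDifferencePrior())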
