New fix for OpenMP shared issue. See scottransom#107
scottransom committed Nov 12, 2019
1 parent 32c9b9e commit 608c59a
Showing 3 changed files with 9 additions and 9 deletions.
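Background on the fix (a sketch, not part of the commit itself): with default(none), every variable referenced inside a parallel region must be given an explicit data-sharing attribute. However, the rules for const-qualified variables (such as idx in the psrfits.c hunks below, declared const int) changed between OpenMP specification versions, so depending on the gcc release, listing such a variable in shared() is either required or rejected as already "predetermined shared". Dropping default(none), and the const variables from the shared() lists, lets every compiler fall back on its default sharing rules. A minimal, hypothetical illustration of the pattern (variable names are made up, not PRESTO's):

#include <stdio.h>

int main(void)
{
    const int n = 8;        /* const bound: its sharing under default(none)
                               is what differs between gcc releases */
    float total[8] = { 0 };
    int ii;
    /* Old style, removed in this commit:
       #pragma omp parallel for default(none) private(ii) shared(total, n)
       New style: no default(none); the const variable keeps its default
       (shared) attribute without being named. */
#ifdef _OPENMP
#pragma omp parallel for private(ii) shared(total)
#endif
    for (ii = 0; ii < n; ii++)
        total[ii] += 1.0f;
    for (ii = 0; ii < n; ii++)
        printf("%.1f ", total[ii]);
    printf("\n");
    return 0;
}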
4 changes: 2 additions & 2 deletions src/accel_utils.c
@@ -1319,7 +1319,7 @@ void inmem_add_ffdotpows(ffdotpows * fundamental, accelobs * obs,

// Now add all the powers
#ifdef _OPENMP
-#pragma omp parallel default(none) shared(rinds,fundamental,obs,numrs,numzs,harm_fract)
+#pragma omp parallel shared(rinds,fundamental,obs)
#endif
{
const int zlo = fundamental->zlo;
@@ -1370,7 +1370,7 @@ void inmem_add_ffdotpows_trans(ffdotpows * fundamental, accelobs * obs,

// Now add all the powers
#ifdef _OPENMP
-#pragma omp parallel default(none) shared(rinds,fundamental,obs,numrs,numzs,harm_fract)
+#pragma omp parallel shared(rinds,fundamental,obs)
#endif
{
const int zlo = fundamental->zlo;
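Note that the two accel_utils.c hunks guard a bare parallel region rather than a parallel for: each thread keeps const copies of the loop bounds (zlo above, plus whatever is collapsed in this view) and the worksharing loop sits further inside the region. A hedged sketch of that shape; everything except the outer pragma and the const copy of zlo is an assumption about the collapsed part of the hunk:

/* Illustrative shape of the guarded region: a parallel block whose
 * threads cache bounds as consts, then split an inner worksharing loop.
 * Names and indexing are made up; only the outer pragma and the
 * "const int zlo" idiom mirror the diff. */
void add_powers_sketch(float *sums, const float *powers,
                       int zlo_in, int numz, int numr)
{
#ifdef _OPENMP
#pragma omp parallel shared(sums, powers)
#endif
    {
        const int zlo = zlo_in;   /* per-thread const copy, as in the hunk */
        int zz, rr;
#ifdef _OPENMP
#pragma omp for
#endif
        for (zz = zlo; zz < zlo + numz; zz++)
            for (rr = 0; rr < numr; rr++)
                sums[(zz - zlo) * numr + rr] += powers[(zz - zlo) * numr + rr];
    }
}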
2 changes: 1 addition & 1 deletion src/dispersion.c
@@ -192,7 +192,7 @@ void dedisp_subbands(float *data, float *lastdata,
float *sub = result + subnum * numpts;
float *chan = lastdata + ii * numpts + dind;
#ifdef _OPENMP
-#pragma omp parallel for default(none) private(jj) shared(sub,chan,numpts,dind)
+#pragma omp parallel for private(jj) shared(sub,chan,numpts)
#endif
for (jj = 0; jj < numpts - dind; jj++)
sub[jj] += chan[jj];
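The dedisp_subbands() hunk is the shift-and-add core of subband dedispersion: sample jj of a subband accumulates sample jj + dind of each contributing channel, so each channel is read starting at its own dispersive delay. A hedged sketch of the pattern in isolation; the outer channel loop and the delay array are assumptions, and only the inner loop and its pragma mirror the hunk:

/* Illustrative shift-and-add over one subband.  delays[ii] plays the
 * role of dind above; the outer loop over channels is assumed and not
 * part of the diff. */
void accumulate_subband(float *sub, float *chandata,
                        int numchan, int numpts, const int *delays)
{
    int ii, jj;
    for (ii = 0; ii < numchan; ii++) {
        float *chan = chandata + ii * numpts + delays[ii];
#ifdef _OPENMP
#pragma omp parallel for private(jj) shared(sub, chan, numpts)
#endif
        for (jj = 0; jj < numpts - delays[ii]; jj++)
            sub[jj] += chan[jj];
    }
}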
12 changes: 6 additions & 6 deletions src/psrfits.c
@@ -935,7 +935,7 @@ void get_PSRFITS_subint(float *fdata, unsigned char *cdata, struct spectra_info
const int idx = (s->use_poln - 1) * s->num_channels;
if (s->bits_per_sample == 16) {
#ifdef _OPENMP
-#pragma omp parallel for default(none) shared(s,cdata,fdata,scales,offsets,weights,idx)
+#pragma omp parallel for shared(s,cdata,fdata,scales,offsets,weights)
#endif
for (ii = 0; ii < s->spectra_per_subint; ii++) {
int jj;
@@ -949,7 +949,7 @@ void get_PSRFITS_subint(float *fdata, unsigned char *cdata, struct spectra_info
}
} else if (s->bits_per_sample == 32) {
#ifdef _OPENMP
-#pragma omp parallel for default(none) shared(s,cdata,fdata,scales,offsets,weights,idx)
+#pragma omp parallel for shared(s,cdata,fdata,scales,offsets,weights)
#endif
for (ii = 0; ii < s->spectra_per_subint; ii++) {
int jj;
@@ -964,7 +964,7 @@ void get_PSRFITS_subint(float *fdata, unsigned char *cdata, struct spectra_info
}
} else {
#ifdef _OPENMP
-#pragma omp parallel for default(none) shared(s,cdata,fdata,scales,offsets,weights,idx)
+#pragma omp parallel for shared(s,cdata,fdata,scales,offsets,weights)
#endif
for (ii = 0; ii < s->spectra_per_subint; ii++) {
int jj;
@@ -981,7 +981,7 @@ void get_PSRFITS_subint(float *fdata, unsigned char *cdata, struct spectra_info
const int idx = s->num_channels;
if (s->bits_per_sample == 16) {
#ifdef _OPENMP
-#pragma omp parallel for default(none) shared(s,cdata,fdata,scales,offsets,weights,idx)
+#pragma omp parallel for shared(s,cdata,fdata,scales,offsets,weights)
#endif
for (ii = 0; ii < s->spectra_per_subint; ii++) {
int jj;
@@ -1000,7 +1000,7 @@ void get_PSRFITS_subint(float *fdata, unsigned char *cdata, struct spectra_info
}
} else if (s->bits_per_sample == 32) {
#ifdef _OPENMP
-#pragma omp parallel for default(none) shared(s,cdata,fdata,scales,offsets,weights,idx)
+#pragma omp parallel for shared(s,cdata,fdata,scales,offsets,weights)
#endif
for (ii = 0; ii < s->spectra_per_subint; ii++) {
int jj;
@@ -1020,7 +1020,7 @@ void get_PSRFITS_subint(float *fdata, unsigned char *cdata, struct spectra_info
}
} else {
#ifdef _OPENMP
-#pragma omp parallel for default(none) shared(s,cdata,fdata,scales,offsets,weights,idx)
+#pragma omp parallel for shared(s,cdata,fdata,scales,offsets,weights)
#endif
for (ii = 0; ii < s->spectra_per_subint; ii++) {
int jj;
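All six psrfits.c hunks guard the same kind of loop: per-spectrum unpacking of subint data, applying the per-channel scales, offsets, and weights before the samples are stored into fdata. The loop bodies are collapsed in this view; a hedged sketch of the usual scale/offset/weight step (the indexing and raw-sample decoding are assumptions, and only the names from the shared() lists are taken from the diff):

/* Illustrative per-spectrum unpacking: raw sample -> scaled, offset,
 * weighted float.  PRESTO's actual indexing and bit-depth handling may
 * differ; this only shows the shape of the work inside each guarded loop. */
void unpack_spectrum(const float *raw, float *fdata,
                     const float *scales, const float *offsets,
                     const float *weights, int numchan, int specnum)
{
    int jj;
    for (jj = 0; jj < numchan; jj++)
        fdata[specnum * numchan + jj] =
            (raw[specnum * numchan + jj] * scales[jj] + offsets[jj]) * weights[jj];
}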
