
Fortran "compilation problem": undefined reference


I get the following error when compiling my code:

hdf5.o: In function `hdf5_module_mp_readhdf5_':
hdf5.F:(.text+0x322e): undefined reference to `courant_'
The main code is channel.f (there are some other source files that play no role in this problem), and it contains a subroutine called "courant".

The problem is that in hdf5.f there is another routine that calls "courant".
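
(For context, a minimal sketch of that situation; apart from courant, ReadHDF5 and the module names that appear in the error message and the answer below, everything here is assumed. With ifort, a procedure contained in a module gets a decorated symbol such as hdf5_module_mp_readhdf5_, while a bare CALL to a name that no USEd module provides is assumed to refer to an external procedure and yields a plain symbol such as courant_, so the linker has nothing to match it against:)

! channel.F (sketch): COURANT is contained in a module, so ifort
! emits the decorated symbol channel_module_mp_courant_, not courant_.
MODULE channel_module
CONTAINS
  SUBROUTINE COURANT
    PRINT *, 'courant called'
  END SUBROUTINE COURANT
END MODULE channel_module

! hdf5.F (sketch): without USE channel_module, the compiler treats
! the bare CALL as a call to an external procedure, so the object
! file references the undefined plain symbol courant_.
MODULE hdf5_module
CONTAINS
  SUBROUTINE ReadHDF5
    CALL COURANT
  END SUBROUTINE ReadHDF5
END MODULE hdf5_module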

This is the makefile:

# This is the makefile 

COMPILER = ifort

USEROPTS = -O3 -fpp -mcmodel=large
FCOMP = -fpic -i_dynamic -mcmodel=large
LINK = -fpic -i_dynamic -mcmodel=large

LINKDIR = -L/apps/fftw/2.1.5-double/lib -L/apps/hdf5/1.8.15/parallel/lib

INCLUDEDIR = -I/apps/fftw/2.1.5-double/include -I/apps/hdf5/1.8.15/parallel/include

PARALLEL = TRUE

LES = TRUE

NETCDF = FALSE
HDF5 = TRUE

NEWTON = FALSE

# Option to run different flavors (basic, ensemble, etc.)
ENSEM = FALSE
BATCH = FALSE
# **********    END of user definitions ************

ifeq ($(ARCH),ifort)
COMPILER = ifort
USEROPTS = -O3 -fpp
else ifeq ($(ARCH),gfortran)
COMPILER = gfortran
USEROPTS = -O3 -cpp
endif

ifeq ($(LES),TRUE)
LES_o = les.o
else
LES_o = no_les.o
endif

ALL2ALL=1

ifeq ($(PARALLEL),TRUE)
COMPILER = mpif90 
MPI = mpi.o
MPIF = mpi.alltoall$(ALL2ALL).F
ifeq ($(HDF5),TRUE)
HDF5_o = hdf5.o 
COMPILER = h5pfc
HDF5OPTS=-DHDF5
endif
ifeq ($(NEWTON),TRUE)
NEWTONOPTS=-DNEWTON
endif
else
ifeq ($(HDF5),TRUE)
HDF5_o = hdf5s.o 
COMPILER = h5pfc
HDF5OPTS=-DHDF5
endif
MPI = mpi_serial.o
endif

MAIN = diablo.f
HEADER = header
ENSEM_HOOKS = dummy_code/ensem_dummy.f
BATCH_HOOKS = dummy_code/batch_dummy.f
HOOKS = batch_hooks.o ensem_hooks.o
ADJOINT = 

ifeq ($(ENSEM),TRUE)
MAIN = ensemble.f
HEADER = header header_ensem
COMPILER = mpif90
ENSEM_HOOKS = ensem_hooks.f
endif

ifeq ($(BATCH),TRUE)
MAIN = batch.f
HEADER = header header_batch
BATCH_HOOKS = batch_hooks.f
#ADJOINT = adj_chan.o adj_per.o
ADJOINT = adj_per.o
endif

# Use the parameters to set flags
ifeq ($(NETCDF),TRUE)
COMPOPTS = $(USEROPTS) $(HDF5OPTS) $(NEWTONOPTS) $(INCLUDEDIR)
LINKOPTS = $(LINKDIR) -ldrfftw -ldfftw -lnetcdf
NETCDF_o = netcdf.o
else
COMPOPTS = $(USEROPTS) $(HDF5OPTS) $(NEWTONOPTS)
LINKOPTS = $(LINKDIR) -lrfftw -lfftw \
    ./lib/liblapack_ifort64.a \
    ./lib/libblas_ifort64.a
NETCDF_o = no_netcdf.o
endif



diablo: $(MAIN) diablo_io.o channel.o $(LES_o) \
    fft.o newton.o shared.o solvers.o $(MPI) \
    $(HDF5_o)
    $(COMPILER) $(COMPOPTS) $(MAIN) -o diablo \
    diablo_io.o channel.o $(LES_o) \
    fft.o newton.o shared.o solvers.o \
    $(MPI) $(LINKOPTS) $(HDF5_o)

shared.o: shared.F grid_def grid_mpi
    $(COMPILER) $(COMPOPTS) -c shared.F

solvers.o: solvers.F shared.o grid_def grid_mpi
    $(COMPILER) $(COMPOPTS) -c solvers.F

diablo_io.o: diablo_io.F shared.o channel.o solvers.o fft.o $(HDF5_o)
    $(COMPILER) $(COMPOPTS) -c diablo_io.F

channel.o: channel.F solvers.o fft.o $(MPI) shared.o $(LES_o) grid_def grid_mpi
    $(COMPILER) $(COMPOPTS) -c channel.F

ifeq ($(LES),TRUE) 
les.o: les.F fft.o shared.o $(MPI) $(HDF5_o) grid_def grid_mpi
    $(COMPILER) $(COMPOPTS) -c les.F
else
no_les.o: dummy_code/no_les.f fft.o shared.o $(MPI) $(HDF5_o) grid_def grid_mpi
    $(COMPILER) $(COMPOPTS) -c dummy_code/no_les.f
endif

ifeq ($(NETCDF),TRUE)
netcdf.o: netcdf.f header
    $(COMPILER) $(COMPOPTS) -c netcdf.f
else
no_netcdf.o: dummy_code/no_netcdf.f 
    $(COMPILER) $(COMPOPTS) -c dummy_code/no_netcdf.f
endif

ifeq ($(PARALLEL),TRUE)
mpi.o: $(MPIF) shared.o solvers.o grid_def grid_mpi
    $(COMPILER) $(COMPOPTS) -c -o mpi.o $(MPIF) 
else
mpi_serial.o: dummy_code/mpi_serial.f shared.o solvers.o grid_def grid_mpi
    $(COMPILER) $(COMPOPTS) -c dummy_code/mpi_serial.f
endif

header : header_mpi grid_def

header_mpi : grid_mpi

hdf5.o : hdf5.F shared.o fft.o $(MPI) grid_def grid_mpi
    $(COMPILER) $(COMPOPTS) -c hdf5.F

fft.o:  fft.F shared.o $(MPI) grid_def grid_mpi
    $(COMPILER) $(COMPOPTS) -c fft.F

#rand.o:  rand.f grid_def grid_mpi
#   $(COMPILER) $(COMPOPTS) -c rand.f

newton.o: newton.F90 $(LES_o) $(HDF5_o) $(MPI) fft.o shared.o solvers.o \
    diablo_io.o grid_def grid_mpi NewtonHook.o GMRESm.o
    $(COMPILER) $(COMPOPTS) -c newton.F90

NewtonHook.o: NewtonHook.F90 $(LES_o) $(HDF5_o) $(MPI) fft.o shared.o solvers.o \
    diablo_io.o grid_def grid_mpi GMRESm.o
    $(COMPILER) $(COMPOPTS) -c NewtonHook.F90

GMRESm.o: GMRESm.F90 $(LES_o) $(HDF5_o) $(MPI) fft.o shared.o solvers.o \
    diablo_io.o grid_def grid_mpi
    $(COMPILER) $(COMPOPTS) -c GMRESm.F90

clean:
    rm -f *.o fort.* *~ diablo core *.mod

What is going on?

COURANT is defined in channel_module. So you need to USE channel_module where it is needed, i.e. inside hdf5_module, not the other way round.

At the moment your code USEs hdf5_module inside channel_module. Instead, try the following:

MODULE channel_module
  USE shared_module
  USE solvers_module
  USE les_module

  CONTAINS
  .....
  SUBROUTINE COURANT
  ..
  END SUBROUTINE COURANT
  .....
END MODULE channel_module

MODULE hdf5_module
  USE shared_module
  USE mpi_module
  USE channel_module

  CONTAINS 
  .....
  subroutine ReadHDF5
  ...
  CALL COURANT
  ...
  end subroutine ReadHDF5

END MODULE hdf5_module

COURANT is defined in a module that USEs another module, but it is the USEd module that is trying to call the subroutine? You have a circular dependency, so please show clearly what you are trying to achieve so that we can suggest how to remove it. Note that you have not attempted to USE channel_module inside hdf5_module (that would be the circular part), so it is not clear why you expect the subroutine to be found. See also.

What I am trying to achieve is compilation. It seems to me that calling COURANT (defined in channel.f) from hdf5.f should not be a problem, should it? I don't understand why the reference is undefined.
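
(To illustrate what the circular case would look like — the module names here are made up — two modules that USE each other cannot be compiled, because each one needs the other's .mod file to exist first:)

MODULE a_module
  USE b_module      ! compiling a_module needs b_module.mod ...
CONTAINS
  SUBROUTINE a_sub
  END SUBROUTINE a_sub
END MODULE a_module

MODULE b_module
  USE a_module      ! ... but compiling b_module needs a_module.mod
CONTAINS
  SUBROUTINE b_sub
  END SUBROUTINE b_sub
END MODULE b_module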
courant is defined in the module channel_module, but hdf5_module (/ReadHDF5) does not USE that module. A reference inside a module is not accessible from outside without USE, so unless you define another courant, the undefined reference is to be expected. In the same way, you could not call the subroutine ReadHDF5 provided by hdf5_module without USEing hdf5_module.

It gives "error #7002: Error in opening the compiled module file. Check INCLUDE paths. [CHANNEL_MODULE]". Do I also have to change the makefile?

Clean the build, and compile channel_module first.
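
(Concretely: once hdf5.F contains USE channel_module, the file channel_module.mod must exist before hdf5.F is compiled, which is what error #7002 reports. A sketch of the corresponding makefile change is below; it assumes nothing else forces the opposite build order. Note that in this makefile les.o already depends on $(HDF5_o) and channel.o depends on $(LES_o), so the added prerequisite forms a cycle; GNU make will warn "Circular ... dependency dropped" and drop one edge, so the cycle has to be broken properly, e.g. by removing the LES-to-HDF5 prerequisite if les.F does not really need it:)

# Sketch: make channel.o (and hence channel_module.mod) a
# prerequisite of hdf5.o so channel.F is compiled first.
# Caveat: with les.o -> $(HDF5_o) and channel.o -> $(LES_o)
# also present, this creates a circular make dependency that
# must be resolved.
hdf5.o : hdf5.F channel.o shared.o fft.o $(MPI) grid_def grid_mpi
    $(COMPILER) $(COMPOPTS) -c hdf5.F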