# NOTE: forum discussion text accidentally pasted above the script (commented
# out so the file remains valid Python):
# "I don't really understand this, so I pasted everything - sorry for the trouble."
# "Hello, from the error this looks like a header-file linking problem; could
#  you share the exact script you used to compile?"
#!/usr/bin/env python
from __future__ import print_function
import os
import re
import sys
import stat
import glob
import shutil
import subprocess
import distutils.ccompiler
import distutils.command.clean
from setuptools import setup, find_packages, distutils
from torch.utils.cpp_extension import BuildExtension, CppExtension
################################################################################
# Parameters parsed from environment
################################################################################
# Build the core Catch libraries unless 'clean' was requested (see loop below).
RUN_BUILD_CORE_LIBS = True
# BUILD_OFFLINE_DEMO defaults to enabled; any of OFF/0/NO/FALSE/N
# (case-insensitive) disables it.
RUN_BUILD_OFFLINE_DEMO = bool((os.getenv('BUILD_OFFLINE_DEMO') is None)
                              or (os.getenv('BUILD_OFFLINE_DEMO').upper()
                                  not in ['OFF', '0', 'NO', 'FALSE', 'N']))
# Regenerate the auto-generated Catch sources by default.
RUN_AUTO_GEN_CATCH_CODE = True
RUN_BUILD_ASAN_CHECK = False
RUN_BUILD_WARNING_CHECK = True
# 'python setup.py clean' should only clean: skip codegen and every build step.
for i, arg in enumerate(sys.argv):
    if arg == 'clean':
        RUN_BUILD_CORE_LIBS = False
        RUN_BUILD_OFFLINE_DEMO = False
        RUN_AUTO_GEN_CATCH_CODE = False
# Get the current path, core library paths and neuware path
cwd = os.path.dirname(os.path.abspath(__file__))
lib_path = os.path.join(cwd, "torch_mlu", "csrc", "lib")
neuware_path = os.path.join(cwd, "third_party", "neuware")
# NEUWARE_HOME env must be set before compiling; fall back to the bundled
# third_party/neuware tree when it looks like a real installation (has lib64).
if not os.getenv('NEUWARE_HOME'):
    if os.path.exists(os.path.join(neuware_path, "lib64")):
        os.environ['NEUWARE_HOME'] = neuware_path
    else:
        print("[Error] can't find NEUWARE_HOME, Please set it!")
        print("Or use script/build_mlu_libs.sh to generate Neuware!")
        # Exit with a non-zero status so callers (CI, shell scripts) see the
        # failure; a bare sys.exit() would report success (status 0).
        sys.exit(1)
# Get Pytorch Dir
base_dir = os.path.dirname(os.path.abspath(__file__))
# PYTORCH_HOME overrides; otherwise assume pytorch is checked out next to us.
pytorch_source_path = os.getenv('PYTORCH_HOME', os.path.dirname(base_dir))
# lib/pythonx.x/site-packages
rel_site_packages = distutils.sysconfig.get_python_lib(prefix='')
# full absolute path to the dir above
full_site_packages = distutils.sysconfig.get_python_lib()
# Define the compile and link options (filled in further down).
extra_link_args = []
extra_compile_args = []
# Check env flag
def _check_env_flag(name, default=''):
return os.getenv(name, default).upper() in ['ON', '1', 'YES', 'TRUE', 'Y']
# make relative rpath
def make_relative_rpath(path):
    """Build a linker flag embedding an rpath relative to $ORIGIN."""
    return '-Wl,-rpath,$ORIGIN/{}'.format(path)
# Generate parts of header/source files in Catch automatically
def gen_catch_code(pytorch_source_path_):
    """Run torch_mlu/tools/gen.py to auto-generate parts of the Catch code.

    Uses an argv list (no shell) so a pytorch path containing spaces or shell
    metacharacters is passed through safely, and runs the current interpreter
    (sys.executable) rather than whatever 'python' resolves to on PATH.
    """
    cmd = [sys.executable, 'torch_mlu/tools/gen.py',
           '--pytorch_dir', pytorch_source_path_]
    # Fail the build on codegen errors, matching build_libs' error handling.
    if subprocess.call(cmd) != 0:
        print("Failed to run '{}'".format(' '.join(cmd)))
        sys.exit(1)
# Calls build_catch_lib.sh with the correct env variables
def build_libs():
    """Run script/build_catch_lib.sh inside ./build with the env it expects."""
    script = os.path.join('..', 'script', 'build_catch_lib.sh')
    cmd = ['bash', script]
    env = os.environ.copy()
    env['PYTORCH_PYTHON'] = sys.executable
    env['CMAKE_INSTALL'] = 'make install'
    # Prepend any user-provided CMAKE_PREFIX_PATH ahead of site-packages.
    prefix = full_site_packages
    if 'CMAKE_PREFIX_PATH' in env:
        prefix = env['CMAKE_PREFIX_PATH'] + ';' + prefix
    env['CMAKE_PREFIX_PATH'] = prefix
    env['PYTORCH_SOURCE_PATH'] = pytorch_source_path
    # Keep the same compile and link args between setup.py and build_catch_lib.sh
    env['EXTRA_COMPILE_ARGS'] = ' '.join(extra_compile_args)
    env['EXTRA_LINK_ARGS'] = ' '.join(extra_link_args)
    # set up the gtest compile runtime environment.
    env['BUILD_TEST'] = 'ON' if _check_env_flag('BUILD_TEST') else 'OFF'
    try:
        os.mkdir('build')
    except OSError:
        pass  # directory already exists
    if subprocess.call(cmd, env=env, cwd='build') != 0:
        print("Failed to run '{}'".format(' '.join(cmd)))
        sys.exit(1)
class Build(BuildExtension):
    """Custom build_ext command: torch's BuildExtension, unchanged for now."""

    def run(self):
        # Run the original BuildExtension first. We need this before building
        # the tests.
        BuildExtension.run(self)
class Clean(distutils.command.clean.clean):
    """Custom clean command: delete everything .gitignore lists, then the
    standard distutils clean. Reading stops at the NOT-CLEAN-FILES marker."""

    def run(self):
        try:
            with open('.gitignore', 'r') as f:
                ignores = f.read()
            # Matches any line starting with '#'; group(1) is non-None only
            # for the '# BEGIN NOT-CLEAN-FILES' marker line.
            pat = re.compile(r'^#( BEGIN NOT-CLEAN-FILES )?')
            for wildcard in filter(None, ignores.split('\n')):
                match = pat.match(wildcard)
                if match:
                    if match.group(1):
                        # Marker is found and stop reading .gitignore.
                        break
                    # Ignore lines which begin with '#'.
                else:
                    # Remove every path matching this .gitignore pattern.
                    for filename in glob.glob(wildcard):
                        try:
                            os.remove(filename)
                        except OSError:
                            # Not a plain file: remove it as a directory tree.
                            shutil.rmtree(filename, ignore_errors=True)
        except OSError:
            # .gitignore missing/unreadable: at least drop the build directory.
            shutil.rmtree('build', ignore_errors=True)
        # It's an old-style class in Python 2.7...
        distutils.command.clean.clean.run(self)
# Configuration for Build the Project.
# The python extension links against the pre-built catch_python core library.
main_libraries = ['catch_python']
include_dirs = []
library_dirs = []
# Fetch the sources to be built: only the thin stub is compiled here.
torch_mlu_sources = (
    glob.glob('torch_mlu/csrc/stub.cpp')
)
# include head files (this repo, the pytorch tree, and its installed headers)
include_dirs += [
    base_dir,
    pytorch_source_path,
    os.path.join(pytorch_source_path, 'torch', 'csrc'),
    os.path.join(pytorch_source_path, 'torch', 'lib', 'tmp_install', 'include'),
]
# include lib files
library_dirs.append(lib_path)
# Baseline compiler flags; -Werror with targeted -Wno-* suppressions.
extra_compile_args += [
    '-std=c++11',
    '-pthread',
    '-Wno-sign-compare',
    '-Wno-deprecated-declarations',
    '-Wno-return-type',
    '-Werror',
]
# Optimization / debug / sanitizer flags, chosen by environment.
DEBUG = _check_env_flag('DEBUG')
if RUN_BUILD_ASAN_CHECK:
    # To get a reasonable performance add -O1 or higher.
    # run executable with LD_PRELOAD=path/to/asan/runtime/lib
    extra_compile_args += ['-O1', '-g', '-DDEBUG',
                           '-fsanitize=address', '-fno-omit-frame-pointer']
elif DEBUG:
    extra_compile_args += ['-Og', '-g', '-DDEBUG']
else:
    extra_compile_args += ['-O3']
TEST_COVERAGE = _check_env_flag('TEST_COVERAGE')
if TEST_COVERAGE:
    # to measure test coverage, gcov needs these args at both compile and
    # link time
    extra_compile_args += ['-fprofile-arcs', '-ftest-coverage']
    extra_link_args += ['-fprofile-arcs', '-ftest-coverage']
# Replace pre-commit of .git to use cpplint (only in a git checkout)
if os.path.exists(os.path.join(cwd, '.git')):
    shutil.copyfile(base_dir + '/script/hooks/commit-msg',
                    base_dir + '/.git/hooks/commit-msg')
    shutil.copyfile(base_dir + '/script/hooks/pre-commit',
                    base_dir + '/.git/hooks/pre-commit')
    # Set the file permission to 755
    os.chmod(base_dir + '/.git/hooks/pre-commit',
             stat.S_IRWXU+stat.S_IROTH+stat.S_IXOTH+stat.S_IRGRP+stat.S_IXGRP)
    os.chmod(base_dir + '/.git/hooks/commit-msg',
             stat.S_IRWXU+stat.S_IROTH+stat.S_IXOTH+stat.S_IRGRP+stat.S_IXGRP)
# Generate parts of Catch code (skipped when running 'clean')
if RUN_AUTO_GEN_CATCH_CODE:
    gen_catch_code(pytorch_source_path)
# Build Catch Core Libs before setup() compiles the stub extension
if RUN_BUILD_CORE_LIBS:
    build_libs()
# the list of runtime dependencies required by this built package
install_requires = []
# Setup
setup(
    name='torch_mlu',
    version='0.15.0-1',
    description='MLU bridge for PyTorch',
    # Exclude the build files.
    packages=find_packages(exclude=['build']),
    ext_modules=[
        # _MLUC compiles only stub.cpp and links the pre-built catch_python
        # library, with an rpath relative to the installed package.
        CppExtension(
            '_MLUC',
            libraries=main_libraries,
            sources=torch_mlu_sources,
            include_dirs=include_dirs,
            extra_compile_args=extra_compile_args,
            library_dirs=library_dirs,
            extra_link_args=extra_link_args + [make_relative_rpath('torch_mlu/csrc/lib')]),
    ],
    install_requires=install_requires,
    # Ship the shared libraries and public headers inside the package.
    package_data={
        'torch_mlu': [
            'csrc/lib/*.so*',
            'csrc/aten/cnnl/cnnlDescriptors.h',
            'csrc/aten/cnnl/cnnlHandle.h',
            'csrc/aten/cnnl/cnnlQuantify.h',
            'csrc/aten/core/allocator.h',
            'csrc/aten/core/caching_allocator.h',
            'csrc/aten/core/profiler_mlu.h',
            'csrc/aten/core/tensor_impl.h',
            'csrc/aten/core/tensor_util.h',
            'csrc/aten/device/device.h',
            'csrc/aten/device/exceptions.h',
            'csrc/aten/device/guard_impl.h',
            'csrc/aten/device/mlu_guard.h',
            'csrc/aten/device/notifier.h',
            'csrc/aten/device/queue_guard.h',
            'csrc/aten/device/queue.h',
            'csrc/aten/device/types.h',
            'csrc/aten/operators/op_proxy.h',
            'csrc/aten/util/cnlog.h',
            'csrc/aten/util/common.h',
            'csrc/aten/util/infer_trans_size.h',
            'csrc/aten/util/matrix_util.h',
            'csrc/aten/util/memory_allocator.h',
            'csrc/aten/util/python_interface.h',
            'csrc/aten/util/python_notifier.h',
        ],
    },
    # Hook in the custom build/clean commands defined above.
    cmdclass={
        'build_ext': Build,
        'clean': Clean,
    })
# do some specified operations
if RUN_BUILD_OFFLINE_DEMO:
    # Because pycocotools depends on Cython, Cython must be installed first.
    # The installing order of packages in requirements.txt is random, so
    # Cython and pycocotools can't be installed by requirements.txt.
    # Use `sys.executable -m pip` so the packages land in the interpreter
    # that is actually running this setup script; best-effort, as before.
    for package in ('Cython==0.29.16', 'pycocotools==2.0.0'):
        subprocess.call([sys.executable, '-m', 'pip', 'install', package])
    # build offline demos
    print("==BUILD OFFLINE DEMO==")
    # Invoke through bash so the demo builds even if the script lost its
    # executable bit; check_call still aborts setup on failure.
    subprocess.check_call(['bash', 'examples/offline/scripts/build_offline.sh'])
# (forum footer, not code: "Please log in to comment")