diff --git a/.github/workflows/bundle_with_dakota.yml b/.github/workflows/bundle_with_dakota.yml
new file mode 100644
index 00000000..1d57f2b8
--- /dev/null
+++ b/.github/workflows/bundle_with_dakota.yml
@@ -0,0 +1,162 @@
+name: 🏎️ Make & Test Wheels 🏎️
+
+on: [pull_request]
+
+env:
+  ERT_SHOW_BACKTRACE: 1
+  NO_PROJECT_RES: 1
+  BOOST_VERSION: 1.83.0
+  BOOST_VERSION_UNDERSCORES: 1_83_0
+  DAKOTA_VERSION: 6.18.0
+  SEBA_TAG: 6.12.0
+  INSTALL_DIR: local
+  FORCE_REBUILD: false
+
+jobs:
+  build_wheels:
+    name: 🛞 Build Wheels 🛞
+    timeout-minutes: 120
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ['3.8','3.9','3.10'] # Versions > 3.10 excluded. Reason: not supported by Everest.
+        os: [ubuntu-latest]
+        ert-version: ['main']
+
+    runs-on: ${{ matrix.os }}
+
+    steps:
+      - uses: actions/cache@v3
+        id: cache-package-check
+        with:
+          key: carolina_dist_${{ matrix.os }}_python-${{ matrix.python-version }}_boost-${{ env.BOOST_VERSION }}_dakota-${{ env.DAKOTA_VERSION }}
+          path: ./carolina_dist
+
+      - uses: actions/checkout@v3
+        if: env.FORCE_REBUILD == 'true' || steps.cache-package-check.outputs.cache-hit != 'true'
+        with:
+          fetch-depth: 0
+
+      - name: Build Linux Wheel
+        if: env.FORCE_REBUILD == 'true' || steps.cache-package-check.outputs.cache-hit != 'true'
+        uses: docker://quay.io/pypa/manylinux2014_x86_64
+        env:
+          PYTHON_VERSION: ${{ matrix.python-version }}
+          BOOST_VERSION: ${{ env.BOOST_VERSION }}
+          BOOST_VERSION_UNDERSCORES: ${{ env.BOOST_VERSION_UNDERSCORES }}
+          DAKOTA_VERSION: ${{ env.DAKOTA_VERSION }}
+        with:
+          entrypoint: /bin/bash
+          args: '-c "sh dakota_manylinux_install_files/build_wheels_gha.sh ${{ matrix.python-version }}"'
+
+      - uses: actions/cache/save@v3
+        if: env.FORCE_REBUILD == 'true' || steps.cache-package-check.outputs.cache-hit != 'true'
+        id: cache-package-store
+        with:
+          key: carolina_dist_${{ matrix.os }}_python-${{ matrix.python-version }}_boost-${{ env.BOOST_VERSION }}_dakota-${{ env.DAKOTA_VERSION }}
+          path: ./carolina_dist
+
+      - name: Get Carolina wheel from cache
+        uses: actions/cache/restore@v3
+        id: restore-cached-package
+        with:
+          key: carolina_dist_${{ matrix.os }}_python-${{ matrix.python-version }}_boost-${{ env.BOOST_VERSION }}_dakota-${{ env.DAKOTA_VERSION }}
+          path: /local/carolina_dist
+
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Set up python venv
+        run: |
+          python -m venv myvenv
+
+      - name: Install Carolina from wheel
+        run: |
+          source myvenv/bin/activate
+          pyv=$(echo ${{matrix.python-version}} | sed 's/\.//g')
+          pip install carolina_dist/carolina-1.0-cp$pyv-cp$pyv-manylinux_2_17_x86_64.manylinux2014_x86_64.whl
+
+      - name: Install other Everest dependencies
+        run: |
+          source myvenv/bin/activate
+          pip install git+https://${{ secrets.DEPENDENCY_ACCESS_TOKEN }}@github.com/equinor/spinningjenny.git
+          pip install git+https://${{ secrets.DEPENDENCY_ACCESS_TOKEN }}@github.com/TNO-Everest/seba.git@${{ env.SEBA_TAG }}
+
+      - name: Install Everest
+        run: |
+          source myvenv/bin/activate
+          git clone https://${{ secrets.DEPENDENCY_ACCESS_TOKEN }}@github.com/equinor/everest.git
+          cd everest
+          pip install .[test,docs]
+
+      - name: Install latest ert main
+        if: matrix.ert-version == 'main'
+        run: |
+          source myvenv/bin/activate
+          sudo apt-get install build-essential
+          pip install git+https://github.com/equinor/ert.git
+
+      - name: Run Tests
+        run: |
+          source myvenv/bin/activate
+          cd everest
+          pytest tests -n 4 -m "not ui_test and not integration_test" --dist loadgroup -sv
+
+      - name: Run Integration Tests
+        run: |
+          source myvenv/bin/activate
+          cd everest
+          pytest tests -n 4 -m "integration_test" --dist loadgroup
+
+      - name: Build Documentation
+        run: |
+          source myvenv/bin/activate
+          cd everest
+          mkdir tmp
+          sphinx-build -W -b html -d tmp/doctrees docs/source {envtmpdir}/html
+
+      - name: Run UI Tests
+        env:
+          QT_QPA_PLATFORM: 'minimal'
+        run: |
+          source myvenv/bin/activate
+          cd everest
+          python -m pytest -m "ui_test"
+
+      - name: Upload wheel as artifact
+        if: always()
+        uses: actions/upload-artifact@v3
+        with:
+          name: ${{ matrix.os }} Python ${{ matrix.python-version }} wheel
+          path: |
+            carolina_dist/*
+            trace/*
+
+  publish:
+    name: 👾 Publish 👾
+    runs-on: ubuntu-latest
+    needs: [ build_wheels ]
+    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
+    steps:
+      - name: Get wheels
+        uses: actions/download-artifact@v3
+        with:
+          path: artifacts
+
+      - name: Create dist/ folder with all python wheels
+        run: |
+          mkdir dist
+          find artifacts -name "carolina*.whl" -exec mv '{}' dist/ \;
+
+      - name: Upload dist/ folder as artifact (debug)
+        uses: actions/upload-artifact@v3
+        with:
+          path: dist/*
+
+      - name: Publish to pypi
+        uses: pypa/gh-action-pypi-publish@v1.8.10
+        with:
+          user: __token__
+          password: ${{ secrets.PYPI_TOKEN }}
diff --git a/README.md b/README.md
index 32086ac6..95d163b0 100644
--- a/README.md
+++ b/README.md
@@ -1,11 +1,15 @@
 # Carolina
 
-Carolina is a [pyDAKOTA](https://github.com/wisdem/pyDAKOTA) fork maintained by Equinor. Its raison d'être is to have easier building of a Python [Dakota](https://dakota.sandia.gov/) wrapper, without any MPI support.
+Carolina is a [pyDAKOTA](https://github.com/wisdem/pyDAKOTA) fork maintained by Equinor. Its raison d'être is to have easier building of a Python [Dakota](https://dakota.sandia.gov/) wrapper, without any MPI support. Carolina supports Python versions 3.8, 3.9 and 3.10.
 
 ## Installation
+For Linux:
 
-Python version: Carolina supports Python version 3.6, 3.7, 3.8, 3.10
+```pip install carolina```
 
+If not on Linux, build Carolina yourself as described below.
+
+## Building and installing Carolina
 
 In order to build Carolina, [Boost](https://www.boost.org/), including Boost.Python, and [Dakota](https://dakota.sandia.gov/) must be installed. This requires [CMake](https://cmake.org/) and a C/C++ compiler. The `BOOST_ROOT` environment variable can be set to the location of the boost library, if not in a default location.
diff --git a/dakota_manylinux_install_files/CMakeLists.txt.patch b/dakota_manylinux_install_files/CMakeLists.txt.patch
new file mode 100644
index 00000000..a7ae8f0d
--- /dev/null
+++ b/dakota_manylinux_install_files/CMakeLists.txt.patch
@@ -0,0 +1,31 @@
+--- a/dakota-6.18.0-public-src-cli/CMakeLists.txt 2023-05-11 03:08:25
++++ b/dakota-6.18.0-public-src-cli/CMakeLists.txt 2023-10-26 08:43:44
+@@ -150,10 +150,20 @@
+ # Perl is required for examples, docs, and system tests
+ find_package(Perl REQUIRED)
+ 
++set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -Wall -lpthread -lutil")
+ # Python is optionally required by Dakota, Teuchos, and Acro; probe
+ # for components here at top-level:
++include_directories(${PYTHON_INCLUDE_DIRS})
++include_directories(/usr/include)
++include_directories(/usr/lib64)
++include_directories(/tmp/INSTALL_DIR/dakota-6.18.0-public-src-cli/packages/external/eigen3/include/)
++find_package(Threads REQUIRED)
++
++set (PYBIND11_PYTHON_VERSION "3.10")
++
+ include(DakotaFindPython)
+ dakota_find_python()
++add_link_options()
+ 
+ # Conditionally find Java JDK needed for input spec, docs, Java API
+ include(DakotaFindJava)
+@@ -186,6 +196,7 @@
+ "${CMAKE_SHARED_LIBRARY_CREATE_CXX_FLAGS} -undefined dynamic_lookup")
+ endif()
+ 
++link_directories(/usr/lib64)
+ 
+ include(DakotaFindSystemTPLs)
+ # Unconditionally find BLAS/LAPACK or equivalent
diff --git a/dakota_manylinux_install_files/DakotaFindPython.cmake.patch b/dakota_manylinux_install_files/DakotaFindPython.cmake.patch
new file mode 100644
index 00000000..88d5131f
--- /dev/null
+++ b/dakota_manylinux_install_files/DakotaFindPython.cmake.patch
@@ -0,0 +1,11 @@
+--- a/dakota-6.18.0-public-src-cli/cmake/DakotaFindPython.cmake 2023-05-11 03:08:25
++++ b/dakota-6.18.0-public-src-cli/cmake/DakotaFindPython.cmake 2023-10-26 08:43:44
+@@ -17,7 +17,7 @@
+ 
+   endif()
+ 
+-  find_package(Python REQUIRED ${dakota_python_components})
++  find_package(Python COMPONENTS ${dakota_python_components} REQUIRED)
+ 
+   # pybind11, C3, Acro, etc., use older CMake FindPythonInterp, so we
+   # coerce it to use same as Dakota; more complex situations may
diff --git a/dakota_manylinux_install_files/build_wheels_gha.sh b/dakota_manylinux_install_files/build_wheels_gha.sh
new file mode 100644
index 00000000..01efd1e9
--- /dev/null
+++ b/dakota_manylinux_install_files/build_wheels_gha.sh
@@ -0,0 +1,159 @@
+#!/bin/bash
+set -e
+
+if [ -z "$1" ]; then
+  echo "Please provide a Python version as an argument (e.g., 3.10)"
+  exit 1
+fi
+
+cd /tmp
+INSTALL_DIR=/tmp/INSTALL_DIR
+
+mkdir -p $INSTALL_DIR
+mkdir /github/workspace/trace
+touch /github/workspace/trace/boost_bootstrap.log
+touch /github/workspace/trace/boost_install.log
+touch /github/workspace/trace/dakota_bootstrap.log
+touch /github/workspace/trace/dakota_install.log
+touch /github/workspace/trace/env
+
+# VERY IMPORTANT: extract python dev headers,
+# more info: https://github.com/pypa/manylinux/pull/1250
+pushd /opt/_internal && tar -xJf static-libs-for-embedding-only.tar.xz && popd
+
+echo "pushd /opt/_internal && tar -xJf static-libs-for-embedding-only.tar.xz && popd" >> /github/workspace/trace/env
+echo "INSTALL_DIR=$INSTALL_DIR" >> /github/workspace/trace/env
+
+yum install lapack-devel -y
+yum install python3-devel.x86_64 -y
+yum install -y wget
+cd /tmp
+
+wget https://boostorg.jfrog.io/artifactory/main/release/$BOOST_VERSION/source/boost_$BOOST_VERSION_UNDERSCORES.tar.bz2 --no-check-certificate > /dev/null
+python_exec=$(which python$1)
+$python_exec -m venv myvenv
+source ./myvenv/bin/activate
+pip install numpy
+pip install pybind11[global]
+
+PYTHON_DEV_HEADERS_DIR=$(rpm -ql python3-devel.x86_64 | grep '\.h$' | head -n 1 | xargs dirname)
+NUMPY_INCLUDE_PATH=$(find /tmp -type d -path "*site-packages/numpy/core/include")
+PYTHON_INCLUDE_PATH=$(python -c "from distutils.sysconfig import get_python_inc; print(get_python_inc())")
+python_root=$(python -c "import sys; print(sys.prefix)")
+python_version=$(python --version | sed -E 's/.*([0-9]+\.[0-9]+)\.([0-9]+).*/\1/')
+python_version_no_dots="$(echo "${python_version//\./}")"
+python_bin_include_lib=" using python : $python_version : $(python -c "from sysconfig import get_paths as gp; g=gp(); print(f\"$python_exec : {g['include']} : {g['stdlib']} ;\")")"
+
+echo "Found dev headers $PYTHON_DEV_HEADERS_DIR"
+echo "Found numpy include path $NUMPY_INCLUDE_PATH"
+echo "Found python include path $PYTHON_INCLUDE_PATH"
+echo "Found python root $python_root"
+
+tar xf boost_$BOOST_VERSION_UNDERSCORES.tar.bz2
+cd boost_$BOOST_VERSION_UNDERSCORES
+
+echo "python_exec=$python_exec" >> /github/workspace/trace/env
+echo "PYTHON_DEV_HEADERS_DIR=$PYTHON_DEV_HEADERS_DIR" >> /github/workspace/trace/env
+echo "NUMPY_INCLUDE_PATH=$NUMPY_INCLUDE_PATH" >> /github/workspace/trace/env
+echo "PYTHON_INCLUDE_PATH=$PYTHON_INCLUDE_PATH" >> /github/workspace/trace/env
+echo "python_root=$python_root" >> /github/workspace/trace/env
+echo "python_version=$python_version" >> /github/workspace/trace/env
+echo "python_version_no_dots=$python_version_no_dots" >> /github/workspace/trace/env
+echo "python_bin_include_lib=$python_bin_include_lib" >> /github/workspace/trace/env
+echo "bootstrap_cmd=./bootstrap.sh --with-libraries=python,filesystem,program_options,regex,serialization,system --with-python=$(which python) --with-python-root=$python_root &> "$INSTALL_DIR/boost_bootstrap.log"" >> /github/workspace/trace/env
+
+./bootstrap.sh --with-libraries=python,filesystem,program_options,regex,serialization,system --with-python=$(which python) --with-python-root="$python_root" &> "$INSTALL_DIR/boost_bootstrap.log"
+sed -i -e "s|.*using python.*|$python_bin_include_lib|" project-config.jam
+echo "# sed -i -e \"s|.*using python.*|$python_bin_include_lib|\" project-config.jam" >> /github/workspace/trace/env
+
+./b2 install -j8 -a cxxflags="-std=c++17" --prefix="$INSTALL_DIR" &> /github/workspace/trace/boost_install.log
+echo "# ./b2 install -j8 -a cxxflags="-std=c++17" --prefix="$INSTALL_DIR" &> /github/workspace/trace/boost_install.log" >> /github/workspace/trace/env
+
+cd $INSTALL_DIR
+DAKOTA_INSTALL_DIR=/tmp/INSTALL_DIR/dakota
+mkdir -p $DAKOTA_INSTALL_DIR
+echo "DAKOTA_INSTALL_DIR=$DAKOTA_INSTALL_DIR" >> /github/workspace/trace/env
+
+wget https://github.com/snl-dakota/dakota/releases/download/v$DAKOTA_VERSION/dakota-$DAKOTA_VERSION-public-src-cli.tar.gz > /dev/null
+tar xf dakota-$DAKOTA_VERSION-public-src-cli.tar.gz
+
+CAROLINA_DIR=/github/workspace
+
+cd dakota-$DAKOTA_VERSION-public-src-cli
+patch -s -p2 < $CAROLINA_DIR/dakota_manylinux_install_files/CMakeLists.txt.patch
+patch -s -p2 < $CAROLINA_DIR/dakota_manylinux_install_files/DakotaFindPython.cmake.patch
+
+mkdir build
+cd build
+
+export PATH=/tmp/INSTALL_DIR/bin:$PATH
+export PYTHON_INCLUDE_DIRS="$PYTHON_INCLUDE_PATH $PYTHON_DEV_HEADERS_DIR /tmp/INSTALL_DIR/lib"
+export PYTHON_EXECUTABLE=$(which python)
+export LD_LIBRARY_PATH="$INSTALL_DIR/lib:/usr/local/lib:$PYTHON_INCLUDE_PATH:$NUMPY_INCLUDE_PATH:$NUMPY_INCLUDE_PATH/numpy:$PYTHON_DEV_HEADERS_DIR:/tmp/INSTALL_DIR/lib:$LD_LIBRARY_PATH"
+
+echo "export PATH=$PATH" >> /github/workspace/trace/env
+echo "export LD_LIBRARY_PATH=$LD_LIBRARY_PATH" >> /github/workspace/trace/env
+echo "export PYTHON_INCLUDE_DIRS=$PYTHON_INCLUDE_DIRS" >> /github/workspace/trace/env
+echo "export PYTHON_EXECUTABLE=$PYTHON_EXECUTABLE" >> /github/workspace/trace/env
+
+export BOOST_PYTHON="boost_python$python_version_no_dots"
+export BOOST_ROOT=$INSTALL_DIR
+export PATH="$PATH:$INSTALL_DIR/bin"
+
+# More stable approach: Go via python
+numpy_lib_dir=$(find /tmp/myvenv/ -name numpy.libs)
+export LD_LIBRARY_PATH="/usr/lib:/usr/lib64:$INSTALL_DIR/lib:$INSTALL_DIR/bin:$numpy_lib_dir:$NUMPY_INCLUDE_PATH"
+export CMAKE_LIBRARY_PATH=$(echo $LD_LIBRARY_PATH | sed 's/::/:/g' | sed 's/:/;/g')
+export PYTHON_LIBRARIES="/usr/lib64/"
+export PYTHON_INCLUDE_DIR="/opt/_internal/cpython-3.7.17/include/python3.7m"
+export CMAKE_LINK_OPTS="-Wl,--copy-dt-needed-entries,-l pthread"
+
+echo "export BOOST_PYTHON=$BOOST_PYTHON" >> /github/workspace/trace/env
+echo "export BOOST_ROOT=$BOOST_ROOT" >> /github/workspace/trace/env
+echo "export PATH=$PATH" >> /github/workspace/trace/env
+echo "export LD_LIBRARY_PATH=$LD_LIBRARY_PATH" >> /github/workspace/trace/env
+echo "export CMAKE_LIBRARY_PATH=\"$CMAKE_LIBRARY_PATH\"" >> /github/workspace/trace/env
+echo "export PYTHON_LIBRARIES=\"$PYTHON_LIBRARIES\"" >> /github/workspace/trace/env
+echo "export PYTHON_INCLUDE_DIR=\"$PYTHON_INCLUDE_DIR\"" >> /github/workspace/trace/env
+
+cmake_command="""
+cmake \
+  -DCMAKE_CXX_STANDARD=14 \
+  -DBUILD_SHARED_LIBS=ON \
+  -DDAKOTA_PYTHON=ON \
+  -DDAKOTA_PYTHON_DIRECT_INTERFACE=ON \
+  -DDAKOTA_PYTHON_DIRECT_INTERFACE_NUMPY=ON \
+  -DDAKOTA_DLL_API=OFF \
+  -DHAVE_X_GRAPHICS=OFF \
+  -DDAKOTA_ENABLE_TESTS=OFF \
+  -DDAKOTA_ENABLE_TPL_TESTS=OFF \
+  -DCMAKE_BUILD_TYPE="Release" \
+  -DDAKOTA_NO_FIND_TRILINOS:BOOL=TRUE \
+  -DCMAKE_INSTALL_PREFIX="$INSTALL_DIR" \
+  -DPYTHON_LIBRARIES=$PYTHON_LIBRARIES \
+  -DCMAKE_LINK_OPTIONS=\"$CMAKE_LINK_OPTS\" \
+  .. &> "$INSTALL_DIR/dakota_bootstrap.log"
+
+"""
+echo "# $cmake_command" >> /github/workspace/trace/env
+
+echo "Bootstrapping Dakota ..."
+$($cmake_command &> /github/workspace/trace/dakota_bootstrap.log)
+
+echo "# make --debug=b -j8 install" >> /github/workspace/trace/env
+echo "Building Dakota ..."
+make --debug=b -j8 install &> /github/workspace/trace/dakota_install.log
+
+cd $INSTALL_DIR/..
+
+git clone https://github.com/equinor/Carolina.git
+cd Carolina
+pip install . &> $INSTALL_DIR/carolina_install.log
+
+pip install pytest
+pytest tests
+
+pip wheel . -w wheelhouse &> $INSTALL_DIR/carolina_pipwheel.log
+auditwheel repair wheelhouse/* -w /github/workspace/carolina_dist &> $INSTALL_DIR/carolina_auditwheel_repair.log
+
+echo "Copied distributables and installation trace"
diff --git a/pyproject.toml b/pyproject.toml
index 88ae299d..7461d885 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,12 +1,12 @@
 [build-system]
-requires = ["setuptools", "setuptools_scm[toml]>=6.2", "wheel", "numpy"]
+requires = ["setuptools", "wheel", "numpy"]
 build-backend = "setuptools.build_meta"
 
 [project]
 name="carolina"
 requires-python = ">=3.8"
-dynamic = ["version"]
+version = "1.0"
 description = "Python wrapper around Dakota"
 authors = [
     { name = "National Renewable Energy Laboratory (NREL)" },
@@ -16,8 +16,6 @@
 readme = "README.md"
 license = {text = "Apache 2.0"}
 [metadata]
+version = "1.0"
 homepage = "https://github.com/equinor/carolina"
 repository = "https://github.com/equinor/carolina"
-
-[tool.setuptools_scm]
-write_to = "src/version.py"
diff --git a/setup.py b/setup.py
index 8573e102..91391c78 100755
--- a/setup.py
+++ b/setup.py
@@ -156,6 +156,8 @@ def get_carolina_extension():
     name="carolina",
     version="%s" % CAROLINA_VERSION,
     description="A Python wrapper for DAKOTA",
+    long_description=open('README.md').read(),
+    long_description_content_type='text/markdown',
     py_modules=["dakota"],
     ext_modules=[CAROLINA],
     package_dir={"": "src"},
diff --git a/src/README.txt b/src/README.txt
deleted file mode 100644
index bdc5f908..00000000
--- a/src/README.txt
+++ /dev/null
@@ -1,70 +0,0 @@
-carolina: a Python wrapper for DAKOTA
--------------------------------------
-
-This is a generic Python wrapper for DAKOTA originally written by
-Peter Graf, National Renewable Energy Lab, 2012. peter.graf@nrel.gov.
-That code combined both the generic wrapper and an OpenMDAO 'driver'
-which used the wrapper. For maintenance reasons, the code has been split
-into this generic portion usable by any Python program, and a separate
-OpenMDAO driver plugin.
-
-The original code is at https://github.com/NREL/DAKOTA_plugin.
-The file dakface.pdf provides some background on how the original code
-was structured, and is generally valid with this updated version.
-
-The OpenMDAO driver using this code is at
-https://github.com/OpenMDAO-Plugins/dakota-driver.
-
-
-This code provides:
-
-1. An interface to DAKOTA, in "library mode", that supports passing to DAKOTA
-argc/argv for the command-line, an optional MPI communicator, and a pointer
-to a Python exception object. This is still in C++.
-
-2. A Python wrapper for this interface, so, in Python, you can say
-"import dakota", then "dakota.DakotaBase().run_dakota(mpi_comm=comm)".
-"comm" will be used as the MPI communicator for DAKOTA, and
-DakotaBase.dakota_callback() will be called by DAKOTA for function evaluations.
-
-The deliverable is a Python 'egg'. If your environment is properly configured,
-you can use this to build the egg:
-
-    python setup.py bdist_egg -d .
-
-To install the egg (easy_install is from setuptools):
-
-    easy_install carolina-6.6_1-py2.7-linux-x86_64.egg
-
-
-To run a trivial test:
-
-    python -m test_dakota
-
-This has been tested on Linux and Windows. Cygwin has also been sucessfully
-built in the past.
-
-
-Requirements
-------------
-
-To build you'll need DAKOTA 6.0+ (svn trunk >= 2707).
-
-To install, just use easy_install or pip to install the egg. All non-system
-libraries are provided. DAKOTA graphics is disabled for both LInux and Windows.
-
-
-License
--------
-This software is licensed under the Apache 2.0 license.
-See "Apache2.0License.txt" in this directory.
-
-
-C++ source code:
-----------------
-dakface.cpp: This is the library entry point. It runs DAKOTA in 'sandwich'
-mode where the caller provides input and DAKOTA calls-back to perform function
-evaluations.
-
-dakota_python_binding.cpp: This is the boost wrapper that exposes the
-functions in dakface.cpp to python.