diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml
index b69bf2c2..ddfc87e9 100644
--- a/.github/workflows/testing.yml
+++ b/.github/workflows/testing.yml
@@ -3,14 +3,44 @@ name: Python package
 on: [pull_request]
 
 jobs:
-  tests:
-    name: "Python ${{ matrix.python-version }} on ${{ matrix.os }}"
+  build-wheels:
+    name: Build wheels
+    timeout-minutes: 30
     strategy:
       fail-fast: false
       matrix:
-        python-version: [3.6, 3.7, 3.8]
-        os: [ubuntu-18.04]
+        python-version: ['3.8', '3.9', '3.10']
+        os: [ubuntu-latest]
+
+    runs-on: ${{ matrix.os }}
+
+    steps:
+      - uses: actions/checkout@v2
+        with:
+          fetch-depth: 0
+
+      - name: Build Linux Wheel
+        uses: docker://quay.io/pypa/manylinux2014_x86_64
+        with:
+          entrypoint: /github/workspace/ci/build_linux_wheel.sh
+          args: ${{ matrix.python-version }}
+
+      - name: Upload wheel as artifact
+        uses: actions/upload-artifact@v2
+        with:
+          name: ${{ matrix.os }} Python ${{ matrix.python-version }} wheel
+          path: dist/*
+
+  tests:
+    name: Run pytest
+    needs: [build-wheels]
+    timeout-minutes: 30
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ['3.8', '3.9', '3.10']
+        os: [ubuntu-latest]
 
     runs-on: ${{ matrix.os }}
 
     steps:
@@ -23,19 +53,43 @@ jobs:
         with:
           python-version: ${{ matrix.python-version }}
 
-      - name: Install Ubuntu dependencies
-        run: |
-          sudo add-apt-repository ppa:jokva/carolina
-          sudo apt-get update
-          sudo apt-get install python3-dakota1 python3-libdakota-dev;
-          sudo apt-get install libboost-python-dev libboost-all-dev
+      - name: Get wheels
+        uses: actions/download-artifact@v2
+        with:
+          name: ${{ matrix.os }} Python ${{ matrix.python-version }} wheel
 
-      - name: Install Dependencies
+      - name: Install wheel and test dependencies
         run: |
+          find . -name "*.whl" -exec pip install {} \;
           pip install -r requirements.txt
           pip install pytest
-          pip install .
 
       - name: Run Tests
         run: |
           python -m pytest
+
+
+  publish:
+    name: Publish to PyPI
+    runs-on: ubuntu-latest
+    needs: [tests]
+
+    # If this is a tagged release
+    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
+
+    steps:
+      - name: Get wheels
+        uses: actions/download-artifact@v2
+        with:
+          path: artifacts
+
+      - name: Move to dist/
+        run: |
+          mkdir dist
+          find artifacts -name "*.whl" -exec mv '{}' dist/ \;
+
+      - name: Publish to PyPI
+        uses: pypa/gh-action-pypi-publish@v1.3.1
+        with:
+          user: statoil-travis
+          password: ${{ secrets.pypi_password }}
diff --git a/ci/build_linux_wheel.sh b/ci/build_linux_wheel.sh
new file mode 100755
index 00000000..7fdb7e4b
--- /dev/null
+++ b/ci/build_linux_wheel.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+set -euo pipefail
+
+case "$1" in
+    3.6) pyver=cp36-cp36m ;;
+    3.7) pyver=cp37-cp37m ;;
+    3.8) pyver=cp38-cp38 ;;
+    3.9) pyver=cp39-cp39 ;;
+    3.10) pyver=cp310-cp310 ;;
+    *)
+        echo "Unknown Python version $1"
+        exit 1
+        ;;
+esac
+
+git config --global --add safe.directory /github/workspace
+
+# Install dependencies
+yum install -y openmpi-devel lapack
+export CFLAGS="-I/usr/include/openmpi-x86_64"
+export CXXFLAGS="-I/usr/include/openmpi-x86_64"
+
+# Install boost
+yum install -y boost-devel boost-python36-devel
+
+# Download dakota
+dakota_ver=6.16.0
+dakota_dir=dakota-${dakota_ver}-public-rhel7.Linux.x86_64-cli
+curl -O https://dakota.sandia.gov/sites/default/files/distributions/public/${dakota_dir}.tar.gz
+tar xf ${dakota_dir}.tar.gz
+export PATH=$(realpath "${dakota_dir}/bin"):$PATH
+export LD_LIBRARY_PATH=$(realpath "${dakota_dir}/bin"):$(realpath "${dakota_dir}/lib")
+
+# Build wheel
+cd /github/workspace
+/opt/python/$pyver/bin/pip wheel . --no-deps -w wheelhouse
+auditwheel repair wheelhouse/* -w dist
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 00000000..bdc60859
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,19 @@
+[build-system]
+
+requires = [
+    "setuptools",
+    "wheel",
+    "numpy",
+]
+
+[tool.pytest.ini_options]
+markers = [
+    "requires_ert_storage",
+    "script",
+    "requires_window_manager",
+    "consumer_driven_contract_test",
+    "unstable",
+    "consumer_driven_contract_verification",
+    "integration_test"
+]
+asyncio_mode = "auto"
diff --git a/setup.py b/setup.py
index 02a6a010..5961550f 100755
--- a/setup.py
+++ b/setup.py
@@ -104,12 +104,18 @@ def get_boost_inc_lib():
     """
     boost_root = os.getenv('BOOST_ROOT', None)
+    boost_inc_dir_env = os.getenv('BOOST_INCLUDE_DIRS', None)
+    boost_lib_dir_env = os.getenv('BOOST_LIBRARY_DIRS', None)
 
     boost_inc_dir = None
     boost_lib_dir = None
 
     if boost_root:
         boost_inc_dir = os.path.join(boost_root, 'include')
         boost_lib_dir = os.path.join(boost_root, 'lib')
+    if boost_inc_dir_env is not None:
+        boost_inc_dir = boost_inc_dir_env
+    if boost_lib_dir_env is not None:
+        boost_lib_dir = boost_lib_dir_env
 
     boost_python = os.getenv('BOOST_PYTHON', get_default_boost_python())
 
diff --git a/src/dakface.cpp b/src/dakface.cpp
index 5e06686f..a0ec780d 100755
--- a/src/dakface.cpp
+++ b/src/dakface.cpp
@@ -24,12 +24,6 @@
 // Replaces Python.h according to boost_python docs.
 #include
 
-#ifdef DAKOTA_HAVE_MPI
-#include
-#include
-namespace mpi = boost::mpi;
-#endif
-
 #include
 
 #include "dakota_windows.h"
@@ -69,13 +63,6 @@ int all_but_actual_main(int argc, char* argv[], void *exc, bool throw_on_error=f
   return _main(argc, argv, NULL, exc, throw_on_error);
 }
 
-#ifdef DAKOTA_HAVE_MPI
-int all_but_actual_main_mpi(int argc, char* argv[], MPI_Comm comm, void *exc, bool throw_on_error=false)
-{
-  return _main(argc, argv, &comm, exc, trhow_on_error);
-}
-#endif
-
 static int _main(int argc, char* argv[], MPI_Comm *pcomm, void *exc, bool throw_on_error)
 {
   static bool initialized = false;
@@ -98,14 +85,7 @@ static int _main(int argc, char* argv[], MPI_Comm *pcomm, void *exc, bool throw_
   // input data checks.  Assumes comm rank 0.
   //Dakota::ProgramOptions opts(argc, argv, 0);
   //Dakota::ParallelLibrary(argc, argv);
-#ifdef DAKOTA_HAVE_MPI
-  int rank;
-  // int ok;
-  // ok = MPI_Comm_rank( *pcomm, &rank ) ;
-  Dakota::ProgramOptions opts(argc, argv, rank);
-#else
   Dakota::ProgramOptions opts(argc, argv, 0);
-#endif
 
   if(throw_on_error)
     // Have Dakota throw an exception rather than aborting the process when error occurs
diff --git a/src/dakota_python_binding.cpp b/src/dakota_python_binding.cpp
index 4b4bf39c..e922781e 100755
--- a/src/dakota_python_binding.cpp
+++ b/src/dakota_python_binding.cpp
@@ -1,31 +1,3 @@
-#ifdef DAKOTA_HAVE_MPI
-#include
-#include
-
-static void sayhello(MPI_Comm comm)
-{
-  if (comm == MPI_COMM_NULL) {
-    std::cout << "You passed MPI_COMM_NULL !!!" << std::endl;
-    return;
-  }
-  int size;
-  MPI_Comm_size(comm, &size);
-  int rank;
-  MPI_Comm_rank(comm, &rank);
-  int plen; char pname[MPI_MAX_PROCESSOR_NAME];
-  MPI_Get_processor_name(pname, &plen);
-  std::cout <<
-    "Hello, World! " <<
-    "I am process " << rank <<
-    " of " << size <<
-    " on " << pname <<
-    "." << std::endl;
-}
-
-#include
-
-#endif
-
 #include
 #include "dakface.hpp"
 #include
@@ -68,32 +40,6 @@ int run_dakota(char *infile, char *outfile, char *errfile, bp::object exc, int r
 
   return all_but_actual_main(argc, argv, tmp_exc, throw_on_error);
 }
-#ifdef DAKOTA_HAVE_MPI
-int run_dakota_mpi(char *infile, bp::object py_comm, char *outfile, char *errfile, bp::object exc, int restart, bool throw_on_error)
-{
-  MPI_Comm comm = MPI_COMM_WORLD;
-  if (py_comm) {
-    PyObject* py_obj = py_comm.ptr();
-    MPI_Comm *comm_p = PyMPIComm_Get(py_obj);
-    if (comm_p == NULL) bp::throw_error_already_set();
-    //sayhello(*comm_p);
-    MPI_Comm comm = * comm_p ;
-  }
-
-  MAKE_ARGV
-  if (restart==1){
-    argv[argc++] = const_cast<char*>("-r"); \
-    argv[argc++] = const_cast<char*>("dakota.rst");
-  }
-
-  void *tmp_exc = NULL;
-  if (exc)
-    tmp_exc = &exc;
-
-  return all_but_actual_main_mpi(argc, argv, comm, tmp_exc, throw_on_error);
-}
-#endif
-
 void translator(const int& exc)
 {
   if (!PyErr_Occurred()) {
@@ -113,10 +59,6 @@ BOOST_PYTHON_MODULE(carolina)
   using namespace bpn;
 #endif
 
-#ifdef DAKOTA_HAVE_MPI
-  if (import_mpi4py() < 0) return;
-#endif
-
 #if PY_MAJOR_VERSION >= 3
   import_array1();
 #else
@@ -130,10 +72,6 @@ BOOST_PYTHON_MODULE(carolina)
 
   register_exception_translator<int>(&translator);
 
   def("run_dakota", run_dakota, "run dakota");
-
-#ifdef DAKOTA_HAVE_MPI
-  def("run_dakota_mpi", run_dakota_mpi);
-#endif
 }
 