
feat(python): support python 3.10

GitOrigin-RevId: a338ad5c68
master · Megvii Engine Team, 2 years ago
parent commit cdf7baa245
16 changed files with 56 additions and 25 deletions
  1. imperative/python/megengine/traced_module/expr.py (+1 -1)
  2. imperative/python/megengine/utils/comp_graph_tools.py (+1 -1)
  3. imperative/python/megengine/utils/network.py (+4 -4)
  4. imperative/python/requires-style.txt (+1 -0)
  5. imperative/python/requires-test.txt (+2 -1)
  6. imperative/python/src/helper.h (+7 -0)
  7. imperative/python/test/unit/data/test_dataloader.py (+13 -0)
  8. imperative/python/test/unit/data/test_pre_dataloader.py (+9 -0)
  9. imperative/src/test/helper.h (+7 -0)
  10. scripts/whl/macos/macos_build_whl.sh (+1 -1)
  11. scripts/whl/manylinux2014/Dockerfile (+1 -1)
  12. scripts/whl/manylinux2014/Dockerfile_aarch64 (+1 -1)
  13. scripts/whl/manylinux2014/do_build_common.sh (+1 -1)
  14. scripts/whl/manylinux2014/init_image.sh (+4 -12)
  15. scripts/whl/utils/utils.sh (+2 -1)
  16. scripts/whl/windows/windows_build_whl.sh (+1 -1)

imperative/python/megengine/traced_module/expr.py (+1 -1)

@@ -136,7 +136,7 @@ class Expr:
if outputs is None:
return
current_graph = active_module_tracer().current_scope()
-if not isinstance(outputs, collections.Sequence):
+if not isinstance(outputs, collections.abc.Sequence):
outputs = (outputs,)
for i in outputs:
assert isinstance(i, RawTensor), "The output must be a Tensor"
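
Background for this change (and the identical ones in comp_graph_tools.py and network.py below): Python 3.10 removed the ABC aliases that collections had re-exported from collections.abc (deprecated since Python 3.3), so collections.Sequence, collections.Callable and collections.Iterable now raise AttributeError. A minimal, illustrative check, not part of this commit:

# collections_abc_check.py -- illustrative only
import sys
import collections.abc

# The collections.abc spellings work on every supported Python 3 version.
assert isinstance((1, 2, 3), collections.abc.Sequence)
assert isinstance(len, collections.abc.Callable)

# The bare collections aliases are gone in 3.10+.
if sys.version_info >= (3, 10):
    assert not hasattr(collections, "Sequence")
else:
    assert isinstance((1, 2, 3), collections.Sequence)  # deprecated alias, removed in 3.10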


imperative/python/megengine/utils/comp_graph_tools.py (+1 -1)

@@ -82,7 +82,7 @@ class _OprStableOrderHeapq:
_used_id_name_pairs = None

def __init__(self, extra_priority):
-assert isinstance(extra_priority, collections.Callable)
+assert isinstance(extra_priority, collections.abc.Callable)
self._list = []
self._extra_priority = extra_priority
self._used_id_name_pairs = {}


imperative/python/megengine/utils/network.py (+4 -4)

@@ -324,7 +324,7 @@ class Network:
if isinstance(modifier, str):
om = modifier
modifier = lambda v: "{}.{}".format(om, v)
-assert isinstance(modifier, collections.Callable)
+assert isinstance(modifier, collections.abc.Callable)
for i in self.all_oprs:
v0 = i.name
v1 = modifier(v0)
@@ -550,7 +550,7 @@ def as_varnode(obj):
return ret

assert isinstance(
-obj, collections.Iterable
+obj, collections.abc.Iterable
), "{} is not compatible with VarNode".format(obj)

val = list(obj)
@@ -573,7 +573,7 @@ def as_oprnode(obj):
return obj

assert isinstance(
-obj, collections.Iterable
+obj, collections.abc.Iterable
), "{} is not compatible with OpNode".format(obj)

val = list(obj)
@@ -619,7 +619,7 @@ class NodeFilter:
oprs = get_oprs_seq(node_iter.inputs, False, False)
node_iter = itertools.islice(oprs, len(oprs) - 1)

-assert isinstance(node_iter, collections.Iterable)
+assert isinstance(node_iter, collections.abc.Iterable)
if (not isinstance(node_iter, NodeFilter)) and type(
self
) is not NodeFilterCheckType:


imperative/python/requires-style.txt (+1 -0)

@@ -2,3 +2,4 @@ black==19.10b0
isort==4.3.21
pylint==2.4.3
mypy==0.982
+typed_ast==1.5.0; python_version > '3.8'

imperative/python/requires-test.txt (+2 -1)

@@ -1,4 +1,5 @@
-pytest==5.3.0
+pytest==5.3.0 ; python_version <= '3.9'
+pytest==6.2.5 ; python_version > '3.9'
pytest-sphinx==0.3.1
tensorboardX==2.4
protobuf==3.20.0 ; python_version > '3.8'
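
The split pytest pins work through PEP 508 environment markers: pip evaluates the expression after ";" against the running interpreter and installs only the matching line, so Python 3.10 gets pytest 6.2.5 while older versions keep 5.3.0. A quick illustrative check using the packaging library (which pip vendors); the snippet is not part of this commit:

# marker_check.py -- illustrative only
from packaging.markers import Marker

for spec in ("python_version <= '3.9'", "python_version > '3.9'"):
    # evaluate() resolves the marker against the current interpreter,
    # so exactly one of the two pytest pins is selected at install time.
    print(spec, "->", Marker(spec).evaluate())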


imperative/python/src/helper.h (+7 -0)

@@ -4,6 +4,13 @@
#include "megbrain/imperative/op_def.h"
#include "megbrain/utils/persistent_cache.h"

+// in Python 3.10, ssize_t is not defined on Windows,
+// so ssize_t should be defined manually before including the pybind headers
+#if defined(_MSC_VER)
+#include <BaseTsd.h>
+typedef SSIZE_T ssize_t;
+#endif

#include <Python.h>
#include <iterator>
#include <string>


imperative/python/test/unit/data/test_dataloader.py (+13 -0)

@@ -7,6 +7,7 @@
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import math
+import multiprocessing
import os
import platform
import time
@@ -134,6 +135,10 @@ def test_dataloader_parallel():
platform.system() == "Windows",
reason="dataloader do not support parallel on windows",
)
+@pytest.mark.skipif(
+multiprocessing.get_start_method() != "fork",
+reason="the runtime error is only raised when fork",
+)
def test_dataloader_parallel_timeout():
dataset = init_dataset()

@@ -161,6 +166,10 @@ def test_dataloader_parallel_timeout():
platform.system() == "Windows",
reason="dataloader do not support parallel on windows",
)
+@pytest.mark.skipif(
+multiprocessing.get_start_method() != "fork",
+reason="the runtime error is only raised when fork",
+)
def test_dataloader_parallel_worker_exception():
dataset = init_dataset()

@@ -287,6 +296,10 @@ def test_prestream_dataloader_multiprocessing():
platform.system() == "Windows",
reason="dataloader do not support parallel on windows",
)
+@pytest.mark.skipif(
+multiprocessing.get_start_method() != "fork",
+reason="the runtime error is only raised when fork",
+)
def test_predataloader_parallel_worker_exception():
dataset = MyPreStream(100)
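
The new skip condition (added here and in test_pre_dataloader.py below) gates these cases on the multiprocessing start method: per the skip reason, the runtime error the tests expect is only raised when workers are forked, and "fork" is not the default everywhere ("spawn" is used on Windows and, since Python 3.8, on macOS). An illustrative way to see what the current platform uses, not part of this commit:

# start_method_check.py -- illustrative only
import multiprocessing

if __name__ == "__main__":
    # Typically "fork" on Linux, "spawn" on Windows and modern macOS.
    print("default start method:", multiprocessing.get_start_method())
    print("available methods:  ", multiprocessing.get_all_start_methods())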



imperative/python/test/unit/data/test_pre_dataloader.py (+9 -0)

@@ -8,6 +8,7 @@
# "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import gc
import math
+import multiprocessing
import os
import platform
import time
@@ -146,6 +147,10 @@ def test_dataloader_parallel():
platform.system() == "Windows",
reason="dataloader do not support parallel on windows",
)
+@pytest.mark.skipif(
+multiprocessing.get_start_method() != "fork",
+reason="the runtime error is only raised when fork",
+)
def test_dataloader_parallel_timeout():
dataset = init_dataset()

@@ -174,6 +179,10 @@ def test_dataloader_parallel_timeout():
platform.system() == "Windows",
reason="dataloader do not support parallel on windows",
)
+@pytest.mark.skipif(
+multiprocessing.get_start_method() != "fork",
+reason="the runtime error is only raised when fork",
+)
def test_dataloader_parallel_worker_exception():
dataset = init_dataset()



imperative/src/test/helper.h (+7 -0)

@@ -2,6 +2,13 @@

#include <variant>

+// in Python 3.10, ssize_t is not defined on Windows,
+// so ssize_t should be defined manually before including the pybind headers
+#if defined(_MSC_VER)
+#include <BaseTsd.h>
+typedef SSIZE_T ssize_t;
+#endif

#include "megbrain/imperative.h"
#include "megbrain/test/helper.h"



scripts/whl/macos/macos_build_whl.sh (+1 -1)

@@ -42,7 +42,7 @@ platform=$(uname -m | awk '{print $0}')
if [ $platform = 'arm64' ];then
FULL_PYTHON_VER="3.8.10 3.9.4 3.10.1"
else
FULL_PYTHON_VER="3.6.10 3.7.7 3.8.3 3.9.4"
FULL_PYTHON_VER="3.6.10 3.7.7 3.8.3 3.9.4 3.10.1"
fi

if [[ -z ${ALL_PYTHON} ]]


scripts/whl/manylinux2014/Dockerfile (+1 -1)

@@ -1,4 +1,4 @@
-FROM quay.io/pypa/manylinux2014_x86_64:2020-12-31-56195b3
+FROM quay.io/pypa/manylinux2014_x86_64:2021-12-30-cb9fd5b

ENV UID=1024 \
PATH=${PATH}:/usr/local/cuda/bin \


scripts/whl/manylinux2014/Dockerfile_aarch64 (+1 -1)

@@ -1,4 +1,4 @@
-FROM quay.io/pypa/manylinux2014_aarch64:2020-12-31-56195b3
+FROM quay.io/pypa/manylinux2014_aarch64:2021-12-30-cb9fd5b

ENV UID=1024 PATH=${PATH}:/usr/local/cuda/bin



scripts/whl/manylinux2014/do_build_common.sh (+1 -1)

@@ -74,7 +74,7 @@ function patch_elf_depend_lib_megenginelite() {
SRC_DIR=$(readlink -f "`dirname $0`/../../../")
source ${SRC_DIR}/scripts/whl/utils/utils.sh

-SUPPORT_ALL_VERSION="36m 37m 38 39"
+SUPPORT_ALL_VERSION="36m 37m 38 39 310"
ALL_PYTHON=${ALL_PYTHON}
if [[ -z ${ALL_PYTHON} ]]
then


scripts/whl/manylinux2014/init_image.sh (+4 -12)

@@ -1,7 +1,5 @@
#!/bin/bash -e

-GET_PIP_URL='https://bootstrap.pypa.io/get-pip.py'
-GET_PIP_URL_36='https://bootstrap.pypa.io/pip/3.6/get-pip.py'
SWIG_URL='https://codeload.github.com/swig/swig/tar.gz/refs/tags/rel-3.0.12'
LLVM_URL='https://github.com/llvm-mirror/llvm/archive/release_60.tar.gz'
CLANG_URL='https://github.com/llvm-mirror/clang/archive/release_60.tar.gz'
@@ -17,20 +15,14 @@ yum install -y python3 python3-devel
python3 -m pip install cython -i https://mirrors.aliyun.com/pypi/simple
python3 -m pip install numpy -i https://mirrors.aliyun.com/pypi/simple

-# FIXME: failed when install pip with python3.10 because python3.10
-# is not installed on aarch64, so we remove 310 from ALL_PYTHON version now
-ALL_PYTHON="36m 37m 38 39"
-numpy_version="1.19.5"
+ALL_PYTHON="36m 37m 38 39 310"
for ver in ${ALL_PYTHON}
do
python_ver=`echo $ver | tr -d m`
-PIP_URL=${GET_PIP_URL}
-if [ ${ver} = "36m" ];then
-PIP_URL=${GET_PIP_URL_36}
+numpy_version="1.19.5"
+if [ ${ver} = "310" ];then
+numpy_version="1.21.6"
fi
echo "use pip url: ${PIP_URL}"
curl ${PIP_URL} | /opt/python/cp${python_ver}-cp${ver}/bin/python - \
--no-cache-dir --only-binary :all:
/opt/python/cp${python_ver}-cp${ver}/bin/pip install \
--no-cache-dir --only-binary :all: numpy==${numpy_version} setuptools==46.1.3 \
-i https://mirrors.aliyun.com/pypi/simple
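
The per-interpreter numpy pin is there because numpy 1.19.5 predates CPython 3.10 and publishes no cp310 wheels, while 1.21.6 does, and the script installs with --only-binary :all:. A quick illustrative way to print the wheel tag the running interpreter needs, not part of this commit:

# wheel_tag_check.py -- illustrative only
import sys

# e.g. "cp39" on Python 3.9, "cp310" on Python 3.10; a pinned package must
# publish a wheel for this tag when installing with --only-binary :all:.
print(f"cp{sys.version_info.major}{sys.version_info.minor}")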


scripts/whl/utils/utils.sh (+2 -1)

@@ -78,7 +78,8 @@ function check_build_ninja_python_api() {
INCLUDE_KEYWORD="${ver}\\\\include"
PYTHON_API_INCLUDES="3.6.8\\\\include 3.7.7\\\\include 3.8.3\\\\include 3.9.4\\\\include 3.10.1\\\\include"
elif [[ $OS =~ "Linux" ]]; then
INCLUDE_KEYWORD="include/python3.${ver:1:1}"
ver=`echo $ver | tr -d m`
INCLUDE_KEYWORD="include/python3.${ver:1}" # like 39/310
info=`command -v termux-info || true`
if [[ "${info}" =~ "com.termux" ]]; then
echo "find termux-info at: ${info}"


scripts/whl/windows/windows_build_whl.sh (+1 -1)

@@ -26,7 +26,7 @@ SRC_DIR=$(readlink -f "`dirname $0`/../../../")
source ${SRC_DIR}/scripts/whl/utils/utils.sh

ALL_PYTHON=${ALL_PYTHON}
FULL_PYTHON_VER="3.6.8 3.7.7 3.8.3 3.9.4"
FULL_PYTHON_VER="3.6.8 3.7.7 3.8.3 3.9.4 3.10.1"
if [[ -z ${ALL_PYTHON} ]]
then
ALL_PYTHON=${FULL_PYTHON_VER}

