Fix seg fault when using Python 2 invalid UTF-8 strings with wstring

Fixes a seg fault when passing a Python string containing invalid UTF-8 content
to a wstring or wchar * parameter. A TypeError is now thrown instead, e.g.:

  %include <std_wstring.i>
  void instring(const std::wstring& s);

  instring(b"h\xe9llooo") # Python
This commit is contained in:
William S Fulton 2018-06-15 19:14:52 +01:00
commit e96316bf31
5 changed files with 57 additions and 4 deletions

View file

@@ -18,16 +18,28 @@ SWIG_AsWCharPtrAndSize(PyObject *obj, wchar_t **cptr, size_t *psize, int *alloc)
int isunicode = PyUnicode_Check(obj);
%#if PY_VERSION_HEX < 0x03000000 && !defined(SWIG_PYTHON_STRICT_UNICODE_WCHAR)
if (!isunicode && PyString_Check(obj)) {
obj = tmp = PyUnicode_FromObject(obj);
isunicode = 1;
tmp = PyUnicode_FromObject(obj);
if (tmp) {
isunicode = 1;
obj = tmp;
} else {
PyErr_Clear();
return SWIG_TypeError;
}
}
%#endif
if (isunicode) {
Py_ssize_t len = PyUnicode_GetSize(obj);
if (cptr) {
Py_ssize_t length;
*cptr = %new_array(len + 1, wchar_t);
PyUnicode_AsWideChar(SWIGPY_UNICODE_ARG(obj), *cptr, len);
(*cptr)[len] = 0;
length = PyUnicode_AsWideChar(SWIGPY_UNICODE_ARG(obj), *cptr, len);
if (length == -1) {
PyErr_Clear();
Py_XDECREF(tmp);
return SWIG_TypeError;
}
(*cptr)[length] = 0;
}
if (psize) *psize = (size_t) len + 1;
if (alloc) *alloc = cptr ? SWIG_NEWOBJ : 0;