scilab: check overflow in array typemaps
This commit is contained in:
parent
b60100453f
commit
38cd5bd405
2 changed files with 18 additions and 4 deletions
|
|
@ -40,6 +40,9 @@ testArray("array_d", array_d_set, array_d_get, [-10.5, 20.4], [-10.5, 20.4]);
|
|||
|
||||
if array_const_i_get() <> [10, 20] then swigtesterror(); end

// array_i holds 10 elements; assigning 11 ([0:10]) must raise an overflow
// error in the typemap. Run it through execstr/errcatch so the expected
// failure does not abort the test script.
// NOTE: the string must be syntactically valid Scilab — an unbalanced
// parenthesis would make errcatch trap a syntax error instead, and the
// overflow check would never actually be exercised.
ierr = execstr('array_i_set([0:10])', 'errcatch');
if ierr == 0 then swigtesterror("Overflow error expected"); end

if BeginString_FIX44a_get() <> "FIX.a.a" then swigtesterror(); end
if BeginString_FIX44b_get() <> "FIX.b.b" then swigtesterror(); end
if BeginString_FIX44c_get() <> "FIX.c.c" then swigtesterror(); end
|
||||
|
|
|
|||
|
|
@ -4,6 +4,10 @@
|
|||
*
|
||||
* --------------------------------------------------------------------------*/
|
||||
|
||||
%{
|
||||
#include <stdio.h>
|
||||
%}
|
||||
|
||||
%define %scilab_asarray_withcopy(TYPEMAPTYPE, FRAGMENTNAME, CTYPE, TEMPDATATYPE)
|
||||
%typemap(TYPEMAPTYPE, fragment="FRAGMENTNAME") CTYPE {
|
||||
size_t i = 0;
|
||||
|
|
@ -21,16 +25,23 @@
|
|||
%enddef
|
||||
/* Convert a Scilab matrix into a fixed-size C array ($1_dim0 elements),
 * rejecting inputs with more elements than the C array can hold.
 * TYPEMAPTYPE:  typemap slot (e.g. in)
 * FRAGMENTNAME: Scilab API reader fragment; fills iRows/iCols/pTempData
 * CTYPE:        C array element type being wrapped
 * TEMPDATATYPE: element type of the temporary buffer returned by the reader */
%define %scilab_asarrayandsize_withcopy(TYPEMAPTYPE, FRAGMENTNAME, CTYPE, TEMPDATATYPE)
%typemap(TYPEMAPTYPE, fragment="FRAGMENTNAME") CTYPE {
  int iRows = 0;
  int iCols = 0;
  TEMPDATATYPE *pTempData = NULL;
  if (FRAGMENTNAME(pvApiCtx, $input, &iRows, &iCols, &pTempData, fname)) {
    return SWIG_ERROR;
  }
  if (iRows*iCols <= $1_dim0) {
    /* Copy only the elements actually provided: bounding the loop by
     * iRows*iCols (not $1_dim0) avoids reading past the end of pTempData
     * when the input matrix is smaller than the C array. */
    size_t i;
    for (i = 0; i < (size_t)(iRows*iCols); i++) {
      $1[i] = ($*1_ltype) pTempData[i];
    }
  }
  else {
    /* Input has more elements than the C array: raise an overflow error.
     * snprintf (stdio.h is included above) guarantees no buffer overrun. */
    char errmsg[100];
    snprintf(errmsg, sizeof errmsg,
      "Size of input data (%d) is too big (maximum is %d)",
      iRows*iCols, (int)($1_dim0));
    SWIG_exception_fail(SWIG_OverflowError, errmsg);
  }
}
%enddef
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue