Description:
http://dev.mysql.com/doc/refman/5.1/en/udf-calling.html
"
unsigned int max_length
The maximum length of the result. The default max_length value differs depending on the result type of the function. For string functions, the default is the length of the longest argument. For integer functions, the default is 21 digits. For real functions, the default is 13 plus the number of decimal digits indicated by initid->decimals. (For numeric functions, the length includes any sign or decimal point characters.)
"
- no mention is made of DECIMAL UDFs. This is important because these seem to be treated as REAL UDFs; not as STRING UDFs, despite the fact that DECIMAL and STRING UDFs have identical signatures at the C level
- For REAL and DECIMAL UDFs, the default appears to be 17 rather than the documented 13.
- When a string argument is passed to a REAL or DECIMAL UDF, the value is 23. It was expected that passing a string argument would yield a max_length of 31, as in the case reported in bug 33544.
How to repeat:
File: udf_return_values.c
-------------------------
#include <my_global.h>  /* must precede other MySQL headers: sets platform macros */
#include <mysql.h>
#include <stdio.h>      /* snprintf */
#include <string.h>     /* strlen */
#if defined(_WIN32) || defined(_WIN64)
#define DLLEXP __declspec(dllexport)
#else
#define DLLEXP /* no dll */
#endif
#ifdef __cplusplus
extern "C" {
#endif
/*
 * Init hook for udf_int_max_length. Performs no setup so the server's
 * default max_length for INTEGER UDFs can be observed by the main function.
 *
 * Fix: the server invokes every UDF init hook as
 *   my_bool xxx_init(UDF_INIT *initid, UDF_ARGS *args, char *message)
 * (see the MySQL UDF calling-conventions documentation), so the prototype
 * must declare all three parameters; calling through a mismatched function
 * type is undefined behavior.
 *
 * Returns 0 (success); a non-zero return would abort the CREATE FUNCTION /
 * query with the text placed in `message`.
 */
DLLEXP my_bool udf_int_max_length_init(UDF_INIT *initid, UDF_ARGS *args,
                                       char *message)
{
    (void) initid;
    (void) args;
    (void) message;
    return 0;
}
/*
 * INTEGER UDF body: reports the max_length value the server chose for
 * this function at init time, so the default can be compared against
 * the documented value (21 digits for integer UDFs).
 */
DLLEXP longlong udf_int_max_length(
    UDF_INIT *initid, UDF_ARGS *args,
    my_bool *is_null, my_bool *error
){
    (void) args;
    (void) is_null;
    (void) error;
    return (longlong) initid->max_length;
}
/*
 * Init hook for udf_real_max_length. Forces decimals to 0 so the integral
 * max_length value can be returned without a fractional part.
 *
 * Fix: the server invokes every UDF init hook as
 *   my_bool xxx_init(UDF_INIT *initid, UDF_ARGS *args, char *message)
 * (see the MySQL UDF calling-conventions documentation), so the prototype
 * must declare all three parameters; calling through a mismatched function
 * type is undefined behavior.
 *
 * Returns 0 (success).
 */
DLLEXP my_bool udf_real_max_length_init(UDF_INIT *initid, UDF_ARGS *args,
                                        char *message)
{
    (void) args;
    (void) message;
    initid->decimals = 0;
    return 0;
}
/*
 * REAL UDF body: reports the max_length value the server chose for this
 * function at init time (documented default: 13 + decimals for real UDFs).
 * decimals is re-forced to 0 on every call, mirroring the init hook.
 */
DLLEXP double udf_real_max_length(
    UDF_INIT *initid, UDF_ARGS *args,
    my_bool *is_null, my_bool *error
){
    (void) args;
    (void) is_null;
    (void) error;
    initid->decimals = 0;
    return (double) initid->max_length;
}
/*
 * Init hook for udf_str_max_length (usable for both STRING and DECIMAL
 * UDFs, which share the C-level signature). Forces decimals to 0.
 *
 * Fix: the server invokes every UDF init hook as
 *   my_bool xxx_init(UDF_INIT *initid, UDF_ARGS *args, char *message)
 * (see the MySQL UDF calling-conventions documentation), so the prototype
 * must declare all three parameters; calling through a mismatched function
 * type is undefined behavior.
 *
 * Returns 0 (success).
 */
DLLEXP my_bool udf_str_max_length_init(UDF_INIT *initid, UDF_ARGS *args,
                                       char *message)
{
    (void) args;
    (void) message;
    initid->decimals = 0;
    return 0;
}
/*
 * STRING/DECIMAL UDF body: renders initid->max_length into the
 * server-provided result buffer and reports its length.
 *
 * Fixes over the original:
 *  - UDF_INIT::max_length is an `unsigned long`, so the conversion must
 *    be "%lu"; the old "%u" was a format/argument mismatch (undefined
 *    behavior on LP64 platforms where long is 64-bit).
 *  - The "\0" embedded in the old format string was dead code: a string
 *    literal already ends at the first NUL, so sprintf never saw those
 *    bytes, and sprintf NUL-terminates its output regardless.
 *  - snprintf bounds the write to the 255-byte result buffer that the
 *    MySQL UDF interface guarantees for string results.
 */
DLLEXP char* udf_str_max_length(
    UDF_INIT *initid, UDF_ARGS *args,
    char* buffer, unsigned long* length,
    my_bool *is_null, my_bool *error
){
    (void) args;
    (void) is_null;
    (void) error;
    initid->decimals = 0;
    snprintf(buffer, 255, "%lu", initid->max_length);
    *length = strlen(buffer);
    return buffer;
}
#ifdef __cplusplus
}
#endif
Compile:
--------
gcc -Wall -I/opt/mysql/mysql/include -shared -o udf_return_values.so udf_return_values.c
Move udf_return_values.so to plugin dir
---------------------------------------
Install
-------
CREATE FUNCTION udf_real_max_length
RETURNS REAL soname 'udf_return_values.so';
SELECT udf_real_max_length();
-> returns 17, (doc suggests should be 13)
SELECT udf_real_max_length(0.1);
-> returns 18, (doc suggests should be 14)
SELECT udf_real_max_length('');
-> returns 23 (would expect 31 as per bug http://bugs.mysql.com/bug.php?id=33544)
CREATE FUNCTION udf_str_max_length
RETURNS DECIMAL soname 'udf_return_values.so';
SELECT udf_str_max_length();
-> returns 17, same as REAL udf
SELECT udf_str_max_length('');
-> returns 23! Would expect 31 (as per bug http://bugs.mysql.com/bug.php?id=33544)
Suggested fix:
Implement advertised behaviour? fix documentation?