This question is not a duplicate of the linked question, because the answer given there works only if the query isn't stored in a variable.
The following query works, but the result is returned as an XML document:
SELECT XMLTYPE.createXML (DBMS_XMLGEN.getxml ('select 2 as a from dual')) FROM DUAL;
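For reference, DBMS_XMLGEN returns the rows as XML rather than the JSON I need; for the query above the result looks roughly like this:

<?xml version="1.0"?>
<ROWSET>
 <ROW>
  <A>2</A>
 </ROW>
</ROWSET>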
The query below also works, but only on Oracle 19 or later, because it relies on a SQL macro:
with FUNCTION f_test return varchar2 SQL_MACRO is
  query VARCHAR2(100) := 'select 1 a from dual';
  ret   VARCHAR2(100) := chr(13) || query || chr(13);
BEGIN
  RETURN ret;
END;
SELECT JSON_ARRAYagg( json_object(t.*) )
FROM f_test() t
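At parse time Oracle substitutes the macro's return string as a table expression, so (shown here for clarity, assuming the macro above) the query effectively becomes:

SELECT JSON_ARRAYagg( json_object(t.*) )
FROM (select 1 a from dual) t
-- expected result: [{"A":1}]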
I've also tried dynamic SQL on Oracle 19:
WITH
  FUNCTION f
    RETURN JSON_ARRAY
  IS
    query VARCHAR2 (100) := 'select 1 from dual';
    l_str VARCHAR2 (1000);
    l_cnt JSON_ARRAY;
  BEGIN
    l_str :=
         'with from_dynamic_query as ('
      || query
      || ') SELECT JSON_ARRAYagg( json_object(*) ) from from_dynamic_query';

    EXECUTE IMMEDIATE l_str INTO l_cnt;

    RETURN l_cnt;
  END;
SELECT f()
FROM DUAL;
[Error] Execution (20: 8):
ORA-06553: PLS-313: 'F' not declared in this scope
ORA-06552: PL/SQL: Item ignored
ORA-06553: PLS-488: 'JSON_ARRAY' must be a type
CodePudding user response:
As I wrote in the comment, the issue may not be related to SQL_MACRO, but to the inability to process * in json_object (see db<>fiddle in 18c).
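A minimal illustration of the difference (my own sketch, not from the question; assumes an 18c database):

-- Listing columns explicitly works on 18c:
select json_arrayagg( json_object('a' value a) )
from (select 1 a from dual);

-- The * shorthand is only available from 19c:
-- select json_arrayagg( json_object(*) )
-- from (select 1 a from dual);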
But this can also be worked around with Polymorphic Table Functions, which are available in 18c. You need to define a new calculated output column whose value is the row serialized into JSON.
Below is the code example:
create package pkg_ser as
  /*Package to implement PTF*/
  function describe(
    tab in out dbms_tf.table_t
  ) return dbms_tf.describe_t;

  procedure fetch_rows;
end pkg_ser;
/
create package body pkg_ser as
  function describe(
    tab in out dbms_tf.table_t
  ) return dbms_tf.describe_t
  as
  begin
    /*Mark input columns as used for subsequent row processing*/
    for i in 1..tab.column.count loop
      tab.column(i).for_read := TRUE;
    end loop;
    /*Declare json output column*/
    return dbms_tf.describe_t(
      new_columns => dbms_tf.columns_new_t(
        1 => dbms_tf.column_metadata_t(
          name => 'JSONVAL',
          type => dbms_tf.type_varchar2
        )
      )
    );
  end;

  procedure fetch_rows
  /*Process rowset and serialize each row in JSON*/
  as
    rowset   dbms_tf.row_set_t;
    num_rows pls_integer;
    new_col  dbms_tf.tab_varchar2_t;
  begin
    /*Get rows*/
    dbms_tf.get_row_set(
      rowset    => rowset,
      row_count => num_rows
    );
    for rn in 1..num_rows loop
      /*Calculate new column value in the same row
        (rid must be rn, the current row, not num_rows)*/
      new_col(rn) := dbms_tf.row_to_char(
        rowset => rowset,
        rid    => rn,
        format => dbms_tf.FORMAT_JSON
      );
    end loop;
    /*Put column to output*/
    dbms_tf.put_col(
      columnid   => 1,
      collection => new_col
    );
  end;
end pkg_ser;
/
create function f_serialize_json(tab in table)
  /*Function to serialize into JSON using PTF*/
  return table pipelined row polymorphic using pkg_ser;
/
with function f_local_exec(
  query in clob
) return varchar2
as
  ret varchar2(32000);
begin
  /*Translate string to query using EXECUTE IMMEDIATE*/
  execute immediate '
    with a as (
      ' || query || '
    )
    select json_arrayagg(jsonval format json)
    from f_serialize_json(a)
  '
  into ret;
  return ret;
end;
select f_local_exec(
  'select level as id, mod(level, 3) as val from dual connect by level < 10'
) as jsonval
from dual
| JSONVAL |
| :-- |
| [{"ID":1, "VAL":1},{"ID":2, "VAL":2},{"ID":3, "VAL":0},{"ID":4, "VAL":1},{"ID":5, "VAL":2},{"ID":6, "VAL":0},{"ID":7, "VAL":1},{"ID":8, "VAL":2},{"ID":9, "VAL":0}] |
db<>fiddle here
CodePudding user response:
It works if I replace JSON_ARRAY with CLOB:
WITH
  FUNCTION f
    RETURN CLOB
  IS
    query VARCHAR2 (100) := 'select 1 a from dual';
    l_str VARCHAR2 (1000);
    l_cnt CLOB;
  BEGIN
    l_str :=
         'with from_dynamic_query as ('
      || query
      || ') SELECT JSON_ARRAYagg( json_object(*) ) from from_dynamic_query';

    EXECUTE IMMEDIATE l_str INTO l_cnt;

    RETURN l_cnt;
  END;
SELECT f()
FROM DUAL;
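The same approach also works with the query passed in as a parameter, which is what I was after (a sketch along the same lines; the hardcoded query is just an example):

WITH
  FUNCTION f (p_query VARCHAR2)
    RETURN CLOB
  IS
    l_str VARCHAR2 (1000);
    l_cnt CLOB;
  BEGIN
    l_str :=
         'with from_dynamic_query as ('
      || p_query
      || ') SELECT JSON_ARRAYagg( json_object(*) ) from from_dynamic_query';

    EXECUTE IMMEDIATE l_str INTO l_cnt;

    RETURN l_cnt;
  END;
SELECT f('select 2 as a, 3 as b from dual')
FROM DUAL;
-- expected result: [{"A":2,"B":3}]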