08-12-2022 21:06 PM - last edited 08-12-2022 21:11 PM
// Format the collection data for the flow, then send the data & parameters to the flow.

// Set delimiters. Choose character combinations very unlikely to show up in any of your data.
Set(LineDelimiter, "~|");
Set(Delimiter, "#$");

// Create the CSV-like dataset with custom delimiters. Column names in the 1st header row
// must match the column names in SQL. Any string data must have single quotes ' ' around it;
// other datatypes like numbers do not require anything extra.
Set(DataCSV,
    Concatenate(
        "ID", Delimiter, "Comment", Delimiter, "Col2", LineDelimiter,
        Concat(Collection1,
            Concatenate(Text(ID), Delimiter, "'", Comment, "'", Delimiter, "'", Col2, "'", LineDelimiter)
        )
    )
);

// Remove the trailing linebreak characters at the end of the data.
Set(DataCSV, Left(DataCSV, Len(DataCSV) - Len(LineDelimiter)));

// Send the parameters & data to the flow. Arguments, in order:
// Service, Database, Table, CRUD-Operation, SQL Primary-Key Column Name, Delimiter,
// Line-Delimiter, Data, & Read-Query (only for READ calls like "SELECT * FROM Table1";
// enter a dummy value if not a read call).
Set(DataOutput,
    'SQL-BatchActions'.Run(
        "SQL", "TestDatabase", "dev.TestTable", "UPDATE", "ID",
        Delimiter, LineDelimiter, DataCSV, ""
    ).dataoutput
);
// Any response from the flow is now in the variable DataOutput.

// Parse the JSON response using named capture groups (?<Name>...). Each group name must
// match the JSON key so the result table gets a column per key.
// Put in the double quotes "" before & after each string value in each regex key-value
// pair, but do not use those double quotes "" with any non-string datatypes.
// So if one of the middle key-value pairs was "Test2": "AbcString", use the regex
// ""Test2"":""(?<Test2>[^""]*)"" — but if it was "Test2": 123, use ""Test2"":(?<Test2>[^""]*)
// Also, the 1st key-value pair includes the starting object curly bracket with the escape
// backslash \{ and the last key-value pair includes the ending curly bracket with \}.
ClearCollect(Collection2,
    MatchAll(DataOutput,
        "\{""ID"":(?<ID>[^""]*),""Comment"":""(?<Comment>[^""]*)"",""Col2"":""(?<Col2>[^""]*)""\}"
    )
)
// More information on this JSON parsing method here: https://youtu.be/2xWiFkBf294