Unlock a world of possibilities! Login now and discover the exclusive benefits awaiting you.
Hi, I have a case.
the first dataset is daily data updated via api. The second table is qvd file (30-day data). And now I need to combine both tables so that the 30-day data is always overwritten by the daily data and saved in lib: // Tracker (lenak) / Price /.
The code:
LIB CONNECT TO 'Price-expert (lenak)';
RestConnectorMasterTable:
SQL SELECT
"token"
FROM JSON (wrap on) "root";
LOAD [token] AS [token]
RESIDENT RestConnectorMasterTable;
DROP TABLE RestConnectorMasterTable;
LET vAccessTokenValue = Peek('token');
trace $(vAccessTokenValue);
Set vTotal = 0;
Set vStartAt = 0;
Set vPageSize = 1000;
do
RestConnectorMasterTable:
SQL SELECT
"count",
"__KEY_root",
(SELECT
"date",
"name",
"Vendor key",
"T2 Key",
"Series key",
"price",
"CPU Key",
"GPU key",
"SSD key",
"RAM Key",
"OS key",
"Resolution key",
"Size key",
"Freq key",
"__FK_data"
FROM "data" FK "__FK_data")
FROM JSON (wrap on) "root" PK "__KEY_root"
WITH CONNECTION (
URL "http://173.999.13.185/api/api-integration-v1/resource/offer-attribute-daily",
HTTPHEADER "X-HTTP-Method-Override" "GET",
HTTPHEADER "Authorization" "Bearer $(vAccessTokenValue)",
QUERY "offset" "$(vStartAt)",
QUERY "limit" "$(vPageSize)"
);
[PriceExpertOfferAttributeDaily]:
LOAD
[date],
[name],
[Vendor key],
[T2 Key],
[Series key],
[price],
[CPU Key],
[GPU key],
[SSD key],
[RAM Key],
[OS key],
[Resolution key],
[Size key],
[Freq key]
RESIDENT RestConnectorMasterTable;
LOAD [count] AS [total]
RESIDENT RestConnectorMasterTable;
DROP TABLE RestConnectorMasterTable;
let vTotal = Peek('total');
trace $(vTotal);
let vStartAt=$(vStartAt)+$(vPageSize);
Loop while vStartAt<vTotal;
Concatenate
LOAD
"date",
name,
"Vendor key",
"T2 Key",
"Series key",
price,
"CPU Key",
"GPU Key",
"SSD key",
"RAM Key",
"OS Key",
"Resolution key",
"Size key",
"Freq key"
FROM [lib://Tracker (lenak)/Price/brandly.qvd]
(qvd);
Hi, before te do...while buce initilize the table, and each iteration should add data to this table, then add the concatenate specifying wich table to concatenate:
LIB CONNECT TO 'Price-expert (lenak)';
RestConnectorMasterTable:
SQL SELECT
"token"
FROM JSON (wrap on) "root";
LOAD [token] AS [token]
RESIDENT RestConnectorMasterTable;
DROP TABLE RestConnectorMasterTable;
LET vAccessTokenValue = Peek('token');
trace $(vAccessTokenValue);
Set vTotal = 0;
Set vStartAt = 0;
Set vPageSize = 1000;
// Initialize table
[PriceExpertOfferAttributeDaily]:
NoConcatenate load * inline [dumbField];
do
RestConnectorMasterTable:
SQL SELECT
"count",
"__KEY_root",
(SELECT
"date",
"name",
"Vendor key",
"T2 Key",
"Series key",
"price",
"CPU Key",
"GPU key",
"SSD key",
"RAM Key",
"OS key",
"Resolution key",
"Size key",
"Freq key",
"__FK_data"
FROM "data" FK "__FK_data")
FROM JSON (wrap on) "root" PK "__KEY_root"
WITH CONNECTION (
URL "http://173.999.13.185/api/api-integration-v1/resource/offer-attribute-daily",
HTTPHEADER "X-HTTP-Method-Override" "GET",
HTTPHEADER "Authorization" "Bearer $(vAccessTokenValue)",
QUERY "offset" "$(vStartAt)",
QUERY "limit" "$(vPageSize)"
);
// Add each pagintation to the table otuside the bucle
Concatenate ([PriceExpertOfferAttributeDaily])
LOAD
[date],
[name],
[Vendor key],
[T2 Key],
[Series key],
[price],
[CPU Key],
[GPU key],
[SSD key],
[RAM Key],
[OS key],
[Resolution key],
[Size key],
[Freq key]
RESIDENT RestConnectorMasterTable;
// Set a name to this table so you can drop it and remove the messing tables
tmpCount:
LOAD [count] AS [total]
RESIDENT RestConnectorMasterTable;
DROP TABLE RestConnectorMasterTable;
let vTotal = Peek('total');
DROP Table tmpCount;
trace $(vTotal);
let vStartAt=$(vStartAt)+$(vPageSize);
Loop while vStartAt<vTotal;
DROP Field dumbField;
Concatenate ([PriceExpertOfferAttributeDaily])
LOAD
"date",
name,
"Vendor key",
"T2 Key",
"Series key",
price,
"CPU Key",
"GPU Key",
"SSD key",
"RAM Key",
"OS Key",
"Resolution key",
"Size key",
"Freq key"
FROM [lib://Tracker (lenak)/Price/brandly.qvd]
(qvd);
STORE [PriceExpertOfferAttributeDaily] into [lib://Tracker (lenak)/Price/brandly.qvd](qvd);
Hi, if you want to keep only the daily data you just have to avoid the "concatenate" part.
If you want to replace the date loaded in the first dataset you can add a "where not Exists()" to the second dataset:
tmpDateLoaded:
LOAD distinct date as dateloaded resident PriceExpertOffer...;
Concatenate LOAD
...
FROM [lib://Tracker (lenak)/Price/brandly.qvd]
(qvd)
WHERE not Exists('dateloaded',date);
DROP table tmpDateLoaded;
If you want to keep the qvd with the last 30 days you can set the date on a variable and use that in the where:
LET vStartDate=Num(Today()-30);
Concatenate LOAD
...
FROM [lib://Tracker (lenak)/Price/brandly.qvd]
(qvd)
WHERE date>=$(vStartDate);
Do a backup of your current qvd before doing the tests so you can recover it if doesn't works at expected.
Hi, Thanks @rubenmarin . QVD File (brandly.qvd) should be overwritten by the daily data incrementally.
What about STORE QV_Table INTO brandly.QVD file? Should not be at the end of the code?
Hi, your initial code already adds the daily data to the qvd, you can add the "where exists" option to avoid duplicated values in case some day the code is executed more than once.
And yes, you always need an STORE sentence to write the data to qvd.
STORE PriceExpertOfferAtributeDaily into [lib://Tracker (lenak)/Price/brandly.qvd](qvd);
@rubenmarin, hmm... there's some problem
this is where the error occurred
Table 'PriceExpertOfferAtributeDaily' not found
Any idea what could be the reason?
Hi, add an "exit script;" before the store and check the name of tables loaded in data model at that point.
"Store into" function is working if I add square brackets [PriceExpertOfferAtributeDaily] however, once I overwrite brandly.qvd with daily data where I have 30 days, I lose all history. On the other hand there is some mess in the tables ;/
Hi, before te do...while buce initilize the table, and each iteration should add data to this table, then add the concatenate specifying wich table to concatenate:
LIB CONNECT TO 'Price-expert (lenak)';
RestConnectorMasterTable:
SQL SELECT
"token"
FROM JSON (wrap on) "root";
LOAD [token] AS [token]
RESIDENT RestConnectorMasterTable;
DROP TABLE RestConnectorMasterTable;
LET vAccessTokenValue = Peek('token');
trace $(vAccessTokenValue);
Set vTotal = 0;
Set vStartAt = 0;
Set vPageSize = 1000;
// Initialize table
[PriceExpertOfferAttributeDaily]:
NoConcatenate load * inline [dumbField];
do
RestConnectorMasterTable:
SQL SELECT
"count",
"__KEY_root",
(SELECT
"date",
"name",
"Vendor key",
"T2 Key",
"Series key",
"price",
"CPU Key",
"GPU key",
"SSD key",
"RAM Key",
"OS key",
"Resolution key",
"Size key",
"Freq key",
"__FK_data"
FROM "data" FK "__FK_data")
FROM JSON (wrap on) "root" PK "__KEY_root"
WITH CONNECTION (
URL "http://173.999.13.185/api/api-integration-v1/resource/offer-attribute-daily",
HTTPHEADER "X-HTTP-Method-Override" "GET",
HTTPHEADER "Authorization" "Bearer $(vAccessTokenValue)",
QUERY "offset" "$(vStartAt)",
QUERY "limit" "$(vPageSize)"
);
// Add each pagintation to the table otuside the bucle
Concatenate ([PriceExpertOfferAttributeDaily])
LOAD
[date],
[name],
[Vendor key],
[T2 Key],
[Series key],
[price],
[CPU Key],
[GPU key],
[SSD key],
[RAM Key],
[OS key],
[Resolution key],
[Size key],
[Freq key]
RESIDENT RestConnectorMasterTable;
// Set a name to this table so you can drop it and remove the messing tables
tmpCount:
LOAD [count] AS [total]
RESIDENT RestConnectorMasterTable;
DROP TABLE RestConnectorMasterTable;
let vTotal = Peek('total');
DROP Table tmpCount;
trace $(vTotal);
let vStartAt=$(vStartAt)+$(vPageSize);
Loop while vStartAt<vTotal;
DROP Field dumbField;
Concatenate ([PriceExpertOfferAttributeDaily])
LOAD
"date",
name,
"Vendor key",
"T2 Key",
"Series key",
price,
"CPU Key",
"GPU Key",
"SSD key",
"RAM Key",
"OS Key",
"Resolution key",
"Size key",
"Freq key"
FROM [lib://Tracker (lenak)/Price/brandly.qvd]
(qvd);
STORE [PriceExpertOfferAttributeDaily] into [lib://Tracker (lenak)/Price/brandly.qvd](qvd);
Thank you so much @rubenmarin, you are great! All seem to be working fine.