from time import time
from typing import Iterable, List, Tuple
import fire
import pandas as pd
import torch
from gluonts.dataset import Dataset
from gluonts.model.forecast import Forecast
from gluonts.torch.model.predictor import PyTorchPredictor
from huggingface_hub import hf_hub_download
from tqdm import tqdm
from uni2ts.model.moirai import MoiraiForecast
from src.utils import ExperimentHandler
def get_moirai_predictor(
model_size: str,
prediction_length: int,
target_dim: int,
batch_size: int,
) -> PyTorchPredictor:
model = MoiraiForecast.load_from_checkpoint(
checkpoint_path=hf_hub_download(
repo_id=f"Salesforce/moirai-1.0-R-{model_size}",
filename="model.ckpt",
),
prediction_length=prediction_length,
context_length=200,
patch_size="auto",
num_samples=100,
target_dim=target_dim,
feat_dynamic_real_dim=0,
past_feat_dynamic_real_dim=0,
map_location="cuda:0" if torch.cuda.is_available() else "cpu",
)
predictor = model.create_predictor(batch_size)
return predictor
def gluonts_instance_fcst_to_df(
fcst: Forecast,
quantiles: List[float],
model_name: str,
) -> pd.DataFrame:
point_forecast = fcst.mean
h = len(point_forecast)
dates = pd.date_range(
fcst.start_date.to_timestamp(),
freq=fcst.freq,
periods=h,
)
fcst_df = pd.DataFrame(
{
"ds": dates,
"unique_id": fcst.item_id,
model_name: point_forecast,
}
)
for q in quantiles:
fcst_df[f"{model_name}-q-{q}"] = fcst.quantile(q)
return fcst_df
def gluonts_fcsts_to_df(
fcsts: Iterable[Forecast],
quantiles: List[float],
model_name: str,
) -> pd.DataFrame:
    dfs = []
    for fcst in tqdm(fcsts):
        fcst_df = gluonts_instance_fcst_to_df(fcst, quantiles, model_name)
        dfs.append(fcst_df)
    return pd.concat(dfs).reset_index(drop=True)
def run_moirai(
gluonts_dataset: Dataset,
model_size: str,
horizon: int,
target_dim: int,
batch_size: int,
quantiles: List[float],
) -> Tuple[pd.DataFrame, float, str]:
init_time = time()
    predictor = get_moirai_predictor(model_size, horizon, target_dim, batch_size)
fcsts = predictor.predict(gluonts_dataset)
model_name = "SalesforceMoirai"
fcsts_df = gluonts_fcsts_to_df(
fcsts,
quantiles=quantiles,
model_name=model_name,
)
total_time = time() - init_time
return fcsts_df, total_time, model_name
def main(dataset: str):
exp = ExperimentHandler(dataset)
fcst_df, total_time, model_name = run_moirai(
gluonts_dataset=exp.gluonts_train_dataset,
model_size="large",
horizon=exp.horizon,
target_dim=1,
batch_size=32,
quantiles=exp.quantiles,
)
exp.save_results(fcst_df, total_time, model_name)
if __name__ == "__main__":
fire.Fire(main)
import os
from time import time
from typing import List, Tuple
os.environ["NIXTLA_NUMBA_RELEASE_GIL"] = "1"
os.environ["NIXTLA_NUMBA_CACHE"] = "1"
import fire
import numpy as np
import pandas as pd
from scipy.stats import norm
from statsforecast import StatsForecast
from statsforecast.models import (
AutoARIMA,
AutoETS,
AutoCES,
DynamicOptimizedTheta,
SeasonalNaive,
)
from src.utils import ExperimentHandler
def run_seasonal_naive(
train_df: pd.DataFrame,
horizon: int,
freq: str,
seasonality: int,
level: List[int],
) -> Tuple[pd.DataFrame, float, str]:
os.environ["NIXTLA_ID_AS_COL"] = "true"
sf = StatsForecast(
models=[SeasonalNaive(season_length=seasonality)],
freq=freq,
n_jobs=-1,
)
    init_time = time()
    fcsts_df = sf.forecast(df=train_df, h=horizon, level=level)
total_time = time() - init_time
return fcsts_df, total_time, "SeasonalNaive"
def ensemble_forecasts(
fcsts_df: pd.DataFrame,
quantiles: List[float],
name_models: List[str],
model_name: str,
) -> pd.DataFrame:
fcsts_df[model_name] = fcsts_df[name_models].mean(axis=1).values # type: ignore
    # approximate the ensemble quantiles with a normal distribution centered at the ensemble mean:
    # each model's sigma is recovered from its 68.27% upper bound (one standard deviation above the mean),
    # and the ensemble sigma assumes independent errors across models
    sigma_models = []
    for model in name_models:
        fcsts_df[f"sigma_{model}"] = fcsts_df[f"{model}-hi-68.27"] - fcsts_df[model]
        sigma_models.append(f"sigma_{model}")
    fcsts_df[f"std_{model_name}"] = (
        fcsts_df[sigma_models].pow(2).sum(axis=1).div(len(sigma_models) ** 2).pow(0.5)
    )
z = norm.ppf(quantiles)
q_cols = []
for q, zq in zip(quantiles, z):
q_col = f"{model_name}-q-{q}"
fcsts_df[q_col] = fcsts_df[model_name] + zq * fcsts_df[f"std_{model_name}"]
q_cols.append(q_col)
fcsts_df = fcsts_df[["unique_id", "ds"] + [model_name] + q_cols]
return fcsts_df
def run_statistical_ensemble(
train_df: pd.DataFrame,
horizon: int,
freq: str,
seasonality: int,
quantiles: List[float],
) -> Tuple[pd.DataFrame, float, str]:
os.environ["NIXTLA_ID_AS_COL"] = "true"
models = [
AutoARIMA(season_length=seasonality),
AutoETS(season_length=seasonality),
AutoCES(season_length=seasonality),
DynamicOptimizedTheta(season_length=seasonality),
]
init_time = time()
    # distribute the series across cores, always keeping at least one worker
    series_per_core = 15
    n_series = train_df["unique_id"].nunique()
    n_jobs = max(min(n_series // series_per_core, os.cpu_count()), 1)
sf = StatsForecast(
models=models,
freq=freq,
n_jobs=n_jobs,
)
fcsts_df = sf.forecast(df=train_df, h=horizon, level=[68.27])
name_models = [repr(model) for model in models]
model_name = "StatisticalEnsemble"
fcsts_df = ensemble_forecasts(
fcsts_df,
quantiles,
name_models,
model_name,
)
total_time = time() - init_time
return fcsts_df, total_time, model_name
def main(dataset: str):
exp = ExperimentHandler(dataset)
# seasonal naive benchmark
fcst_df, total_time, model_name = run_seasonal_naive(
train_df=exp.train_df,
horizon=exp.horizon,
freq=exp.freq,
seasonality=exp.seasonality,
level=exp.level,
)
fcst_df = exp.fcst_from_level_to_quantiles(fcst_df, model_name)
exp.save_results(fcst_df, total_time, model_name)
# statistical ensemble
fcst_df, total_time, model_name = run_statistical_ensemble(
train_df=exp.train_df,
horizon=exp.horizon,
freq=exp.freq,
seasonality=exp.seasonality,
quantiles=exp.quantiles,
)
exp.save_results(fcst_df, total_time, model_name)
if __name__ == "__main__":
    # warm-up call: trigger (and cache) Numba compilation of AutoARIMA so it is not charged to the timed run
    from statsforecast.utils import AirPassengers as ap

    AutoARIMA(season_length=12).forecast(ap.astype(np.float32), h=12)
fire.Fire(main)
from functools import partial
from pathlib import Path
from typing import List
import numpy as np
import pandas as pd
from gluonts.dataset import Dataset
from gluonts.dataset.repository.datasets import (
get_dataset,
dataset_names as gluonts_datasets,
)
from gluonts.time_feature.seasonality import get_seasonality
from utilsforecast.evaluation import evaluate
from utilsforecast.losses import mase, smape
def quantile_loss(
df: pd.DataFrame,
models: list,
q: float = 0.5,
id_col: str = "unique_id",
target_col: str = "y",
) -> pd.DataFrame:
delta_y = df[models].sub(df[target_col], axis=0)
res = (
np.maximum(q * delta_y, (q - 1) * delta_y)
.groupby(df[id_col], observed=True)
.mean()
)
res.index.name = id_col
res = res.reset_index()
return res
class ExperimentHandler:
def __init__(
self,
dataset: str,
quantiles: List[float] = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9],
results_dir: str = "./results",
models_dir: str = "./models",
):
if dataset not in gluonts_datasets:
            raise Exception(
                f"dataset {dataset} not found in gluonts. "
                f"Available datasets: {', '.join(gluonts_datasets)}"
            )
self.dataset = dataset
self.quantiles = quantiles
self.level = self._transform_quantiles_to_levels(quantiles)
self.results_dir = results_dir
self.models_dir = models_dir
# defining datasets
self._maybe_download_m3_file(self.dataset)
gluonts_dataset = get_dataset(self.dataset)
self.horizon = gluonts_dataset.metadata.prediction_length
if self.horizon is None:
            raise Exception(
                f"horizon not found for dataset {self.dataset}; "
                "the experiment cannot be run"
            )
self.freq = gluonts_dataset.metadata.freq
self.seasonality = get_seasonality(self.freq)
self.gluonts_train_dataset = gluonts_dataset.train
self.gluonts_test_dataset = gluonts_dataset.test
self._create_dir_if_not_exists(self.results_dir)
@staticmethod
def _maybe_download_m3_file(dataset: str):
if dataset[:2] == "m3":
m3_file = Path.home() / ".gluonts" / "datasets" / "M3C.xls"
if not m3_file.exists():
from datasetsforecast.m3 import M3
from datasetsforecast.utils import download_file
download_file(m3_file.parent, M3.source_url)
@staticmethod
def _transform_quantiles_to_levels(quantiles: List[float]) -> List[int]:
        level = [
            int(100 - 200 * q) for q in quantiles if q < 0.5
        ]  # q >= 0.5 is covered by symmetry; for q = 0.5 the point forecast is used (mean = median in this case)
level = sorted(list(set(level)))
return level
@staticmethod
def _create_dir_if_not_exists(directory: str):
Path(directory).mkdir(parents=True, exist_ok=True)
@staticmethod
def _transform_gluonts_instance_to_df(
ts: dict,
last_n: int | None = None,
) -> pd.DataFrame:
start_period = ts["start"]
start_ds, freq = start_period.to_timestamp(), start_period.freq
target = ts["target"]
ds = pd.date_range(start=start_ds, freq=freq, periods=len(target))
if last_n is not None:
target = target[-last_n:]
ds = ds[-last_n:]
ts_df = pd.DataFrame({"unique_id": ts["item_id"], "ds": ds, "y": target})
return ts_df
@staticmethod
def _transform_gluonts_dataset_to_df(
gluonts_dataset: Dataset,
last_n: int | None = None,
) -> pd.DataFrame:
df = pd.concat(
[
ExperimentHandler._transform_gluonts_instance_to_df(ts, last_n=last_n)
for ts in gluonts_dataset
]
)
df = df.reset_index(drop=True)
return df
@property
def train_df(self) -> pd.DataFrame:
train_df = self._transform_gluonts_dataset_to_df(self.gluonts_train_dataset)
return train_df
@property
def test_df(self) -> pd.DataFrame:
test_df = self._transform_gluonts_dataset_to_df(
self.gluonts_test_dataset,
last_n=self.horizon,
)
return test_df
def save_dataframe(self, df: pd.DataFrame, file_name: str):
df.to_csv(f"{self.results_dir}/{file_name}", index=False)
def save_results(self, fcst_df: pd.DataFrame, total_time: float, model_name: str):
self.save_dataframe(
fcst_df,
f"{model_name}-{self.dataset}-fcst.csv",
)
time_df = pd.DataFrame({"time": [total_time], "model": model_name})
self.save_dataframe(
time_df,
f"{model_name}-{self.dataset}-time.csv",
)
def fcst_from_level_to_quantiles(
self,
fcst_df: pd.DataFrame,
model_name: str,
) -> pd.DataFrame:
fcst_df = fcst_df.copy()
cols = ["unique_id", "ds", model_name]
for q in self.quantiles:
if q == 0.5:
col = f"{model_name}"
else:
lv = int(100 - 200 * q)
hi_or_lo = "lo" if lv > 0 else "hi"
lv = abs(lv)
col = f"{model_name}-{hi_or_lo}-{lv}"
q_col = f"{model_name}-q-{q}"
fcst_df[q_col] = fcst_df[col].values
cols.append(q_col)
return fcst_df[cols]
def evaluate_models(self, models: List[str]) -> pd.DataFrame:
test_df = self.test_df
train_df = self.train_df
fcsts_df = []
times_df = []
for model in models:
fcst_method_df = pd.read_csv(
f"{self.results_dir}/{model}-{self.dataset}-fcst.csv"
).set_index(["unique_id", "ds"])
fcsts_df.append(fcst_method_df)
time_method_df = pd.read_csv(
f"{self.results_dir}/{model}-{self.dataset}-time.csv"
)
times_df.append(time_method_df)
fcsts_df = pd.concat(fcsts_df, axis=1).reset_index()
fcsts_df["ds"] = pd.to_datetime(fcsts_df["ds"])
times_df = pd.concat(times_df)
test_df = test_df.merge(fcsts_df, how="left")
assert test_df.isna().sum().sum() == 0, "merge contains nas"
# point evaluation
point_fcsts_cols = ["unique_id", "ds", "y"] + models
test_df["unique_id"] = test_df["unique_id"].astype(str)
train_df["unique_id"] = train_df["unique_id"].astype(str)
mase_seas = partial(mase, seasonality=self.seasonality)
eval_df = evaluate(
test_df[point_fcsts_cols],
train_df=train_df,
metrics=[smape, mase_seas],
)
# probabilistic evaluation
eval_prob_df = []
for q in self.quantiles:
prob_cols = [f"{model}-q-{q}" for model in models]
eval_q_df = quantile_loss(test_df, models=prob_cols, q=q)
eval_q_df[prob_cols] = eval_q_df[prob_cols] * self.horizon
eval_q_df = eval_q_df.rename(columns=dict(zip(prob_cols, models)))
eval_q_df["metric"] = f"quantile-loss-{q}"
eval_prob_df.append(eval_q_df)
eval_prob_df = pd.concat(eval_prob_df)
eval_prob_df = eval_prob_df.groupby("metric").sum().reset_index()
total_y = test_df["y"].sum()
eval_prob_df[models] = eval_prob_df[models] / total_y
eval_prob_df["metric"] = "scaled_crps"
eval_df = pd.concat([eval_df, eval_prob_df]).reset_index(drop=True)
eval_df = eval_df.groupby("metric").mean(numeric_only=True).reset_index()
eval_df = eval_df.melt(id_vars="metric", value_name="value", var_name="model")
times_df.insert(0, "metric", "time")
times_df = times_df.rename(columns={"time": "value"})
eval_df = pd.concat([eval_df, times_df])
eval_df.insert(0, "dataset", self.dataset)
eval_df = eval_df.sort_values(["dataset", "metric", "model"])
eval_df = eval_df.reset_index(drop=True)
return eval_df
download_data:
mkdir -p data
curl https://www.datasource.ai/attachments/eyJpZCI6Ijk4NDYxNjE2NmZmZjM0MGRmNmE4MTczOGMyMzI2ZWI2LmNzdiIsInN0b3JhZ2UiOiJzdG9yZSIsIm1ldGFkYXRhIjp7ImZpbGVuYW1lIjoiUGhhc2UgMCAtIFNhbGVzLmNzdiIsInNpemUiOjEwODA0NjU0LCJtaW1lX3R5cGUiOiJ0ZXh0L2NzdiJ9fQ -o data/phase_0_sales.csv
curl https://www.datasource.ai/attachments/eyJpZCI6ImM2OGQxNGNmNTJkZDQ1MTUyZTg0M2FkMDAyMjVlN2NlLmNzdiIsInN0b3JhZ2UiOiJzdG9yZSIsIm1ldGFkYXRhIjp7ImZpbGVuYW1lIjoiUGhhc2UgMSAtIFNhbGVzLmNzdiIsInNpemUiOjEwMTgzOTYsIm1pbWVfdHlwZSI6InRleHQvY3N2In19 -o data/phase_1_sales.csv
curl https://www.datasource.ai/attachments/eyJpZCI6IjhlNmJmNmU3ZTlhNWQ4NTcyNGVhNTI4YjAwNTk3OWE1LmNzdiIsInN0b3JhZ2UiOiJzdG9yZSIsIm1ldGFkYXRhIjp7ImZpbGVuYW1lIjoiUGhhc2UgMiAtIFNhbGVzLmNzdiIsInNpemUiOjEwMTI0MzcsIm1pbWVfdHlwZSI6InRleHQvY3N2In19 -o data/phase_2_sales.csv
curl https://www.datasource.ai/attachments/eyJpZCI6IjI1NDQxYmMyMTQ3MTA0MjJhMDcyYjllODcwZjEyNmY4LmNzdiIsInN0b3JhZ2UiOiJzdG9yZSIsIm1ldGFkYXRhIjp7ImZpbGVuYW1lIjoicGhhc2UgMiBzdWJtaXNzaW9uIGV4YW1pbmUgc21vb3RoZWQgMjAyNDEwMTcgRklOQUwuY3N2Iiwic2l6ZSI6MTk5MzAzNCwibWltZV90eXBlIjoidGV4dC9jc3YifX0 -o data/solution_1st_place.csv
curl https://www.datasource.ai/attachments/eyJpZCI6IjU3ODhjZTUwYTU3MTg3NjFlYzMzOWU0ZTg3MWUzNjQxLmNzdiIsInN0b3JhZ2UiOiJzdG9yZSIsIm1ldGFkYXRhIjp7ImZpbGVuYW1lIjoidm4xX3N1Ym1pc3Npb25fanVzdGluX2Z1cmxvdHRlLmNzdiIsInNpemUiOjM5MDkzNzksIm1pbWVfdHlwZSI6InRleHQvY3N2In19 -o data/solution_2nd_place.csv
curl https://www.datasource.ai/attachments/eyJpZCI6ImE5NzcwNTZhMzhhMTc2ZWJjODFkMDMwMTM2Y2U2MTdlLmNzdiIsInN0b3JhZ2UiOiJzdG9yZSIsIm1ldGFkYXRhIjp7ImZpbGVuYW1lIjoiYXJzYW5pa3phZF9zdWIuY3N2Iiwic2l6ZSI6Mzg4OTcyNCwibWltZV90eXBlIjoidGV4dC9jc3YifX0 -o data/solution_3rd_place.csv
curl https://www.datasource.ai/attachments/eyJpZCI6ImVlZmUxYWY2NDFjOWMwM2IxMzRhZTc2MzI1Nzg3NzIxLmNzdiIsInN0b3JhZ2UiOiJzdG9yZSIsIm1ldGFkYXRhIjp7ImZpbGVuYW1lIjoiVEZUX3R1bmVkX1YyX3NlZWRfNDIuY3N2Iiwic2l6ZSI6NjA3NDgzLCJtaW1lX3R5cGUiOiJ0ZXh0L2NzdiJ9fQ -o data/solution_4th_place.csv
curl https://www.datasource.ai/attachments/eyJpZCI6IjMwMDEwMmY3NTNhMzlhN2YxNTk3ODYxZTI1N2Q2NzRmLmNzdiIsInN0b3JhZ2UiOiJzdG9yZSIsIm1ldGFkYXRhIjp7ImZpbGVuYW1lIjoiZGl2aW5lb3B0aW1pemVkd2VpZ2h0c2Vuc2VtYmxlLmNzdiIsInNpemUiOjE3OTU0NzgsIm1pbWVfdHlwZSI6InRleHQvY3N2In19 -o data/solution_5th_place.csv
# Score 2nd place in the VN1 Challenge with a few lines of code in under 10 seconds using TimeGPT
We present a fully reproducible experiment demonstrating that Nixtla's **TimeGPT** can reach the **2nd position** in the [VN1 Forecasting Accuracy Challenge](https://www.datasource.ai/en/home/data-science-competitions-for-startups/phase-2-vn1-forecasting-accuracy-challenge/description) with **zero-shot forecasting**. The foundation model is used as-is: most of the code handles **data cleaning and preprocessing** (a condensed sketch is shown below), not model training or parameter tuning.
The table below shows the official competition results, with TimeGPT outperforming every submission except the 1st-place model.
| **Model** | **Score** |
| ----------- | ---------- |
| 1st | 0.4637 |
| **TimeGPT** | **0.4651** |
| 2nd | 0.4657 |
| 3rd | 0.4758 |
| 4th | 0.4774 |
| 5th | 0.4808 |
---
## **Introduction**
The VN1 Forecasting Accuracy Challenge tasked participants with forecasting future sales using historical sales and pricing data. The goal was to develop robust predictive models capable of anticipating sales trends for various products across different clients and warehouses. Submissions were evaluated based on their accuracy and bias against actual sales figures.
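Concretely, the competition score, as implemented in `vn1_competition_evaluation` in `src/main.py`, combines the total absolute error (accuracy) and the absolute value of the total signed error (bias), normalized by total sales:

$$
\text{score} = \frac{\sum_{i}\lvert \hat{y}_{i} - y_{i} \rvert \;+\; \bigl\lvert \sum_{i} (\hat{y}_{i} - y_{i}) \bigr\rvert}{\sum_{i} y_{i}}
$$

where the sums run over every series and forecasted week, and lower scores are better.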
The competition was structured into two phases:
- **Phase 1** (September 12 - October 3, 2024): Participants used the provided Phase 0 sales data to predict sales for Phase 1. This phase lasted three weeks and featured live leaderboard updates to track participant progress.
- **Phase 2** (October 3 - October 17, 2024): Participants utilized both Phase 0 and Phase 1 data to predict sales for Phase 2. Unlike Phase 1, there were no leaderboard updates during this phase until the competition concluded.
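The sales files are provided in wide format (one column per week). As a reference, here is a condensed sketch of the preprocessing, mirroring `read_and_prepare_data` and `get_train_data` in `src/main.py`: reshape to long format and drop each series' leading zeros.

```python
import pandas as pd


def prepare_sales(path: str) -> pd.DataFrame:
    """Reshape a wide sales file into long format with `unique_id`, `ds`, `y` columns."""
    df = pd.read_csv(path)
    id_cols = ["Client", "Warehouse", "Product"]
    df["unique_id"] = df[id_cols].astype(str).agg("-".join, axis=1)
    df = df.drop(columns=id_cols).melt(id_vars="unique_id", var_name="ds", value_name="y")
    df["ds"] = pd.to_datetime(df["ds"])
    return df.sort_values(["unique_id", "ds"])


# Stack phase 0 and phase 1 sales, then keep only the rows after each series' first sale
# (the same cumulative-sum filter used in functions.R for the R version).
train_df = pd.concat([prepare_sales(f"./data/phase_{i}_sales.csv") for i in (0, 1)])
train_df = train_df.sort_values(["unique_id", "ds"])
train_df = train_df[train_df.groupby("unique_id")["y"].cumsum() > 0].reset_index(drop=True)
```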
One of the competition's key requirements was the use of **open-source solutions**. Since TimeGPT is accessed through an API, we did not submit its forecasts during the competition; instead, we showcase its effectiveness by evaluating our approach against the official results.
Our approach leverages **zero-shot forecasting**: no training, fine-tuning, or manual hyperparameter adjustment is needed. We used only **historical sales data**, without any exogenous variables, to generate the forecasts. In this setting, TimeGPT's accuracy surpasses all but one competitor.
Remarkably, the process required only about **5 seconds of inference time**, demonstrating the efficiency of TimeGPT.
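At its core, the pipeline boils down to a single `forecast` call. A minimal sketch, assuming `train_df` is the cleaned long-format data from above and that the `NIXTLA_API_KEY` environment variable is set (loaded from `.env` in `src/main.py`):

```python
from nixtla import NixtlaClient

# The client reads the API key from the NIXTLA_API_KEY environment variable.
client = NixtlaClient()

# Zero-shot forecast: 13 weekly steps ahead with the long-horizon TimeGPT variant;
# no training, fine-tuning, or hyperparameter search is involved.
fcst_df = client.forecast(train_df, h=13, model="timegpt-1-long-horizon")
```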
---
## **Empirical Evaluation**
This study considers time series from multiple datasets provided during the competition. Unlike most competitors, we do not train, fine-tune, or manually adjust TimeGPT. Instead, we rely on **zero-shot learning** to forecast the time series directly.
We contrast TimeGPT's zero-shot forecasts with the top five submissions to the competition, following the official rules and metric of the VN1 competition.
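As a usage sketch, the evaluation step reuses the helpers defined in `src/main.py` and the `fcst_df` produced in the sketch above (the winning submissions are downloaded by `make download_data`):

```python
from src.main import get_competition_forecasts, vn1_competition_evaluation

# Join TimeGPT's forecasts with the top-five submissions and score them all
# with the official competition metric.
fcst_df = fcst_df.merge(get_competition_forecasts(), on=["unique_id", "ds"], how="left")
print(vn1_competition_evaluation(fcst_df))
```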
An R version of this study is also available via `nixtlar`, a CRAN package that provides an interface to Nixtla's TimeGPT.
---
## **Results**
The table below summarizes the official competition results. Despite using a zero-shot approach, **TimeGPT achieves the 2nd position** with a score of **0.4651**, outperforming the submissions ranked 2nd through 5th.
| **Model** | **Score** |
| ----------- | ---------- |
| 1st | 0.4637 |
| **TimeGPT** | **0.4651** |
| 2nd | 0.4657 |
| 3rd | 0.4758 |
| 4th | 0.4774 |
| 5th | 0.4808 |
---
## **Reproducibility**
All necessary code and detailed instructions for reproducing the experiment are available in this repository.
### **Instructions**
1. **Get an API Key** from the [Nixtla Dashboard](https://dashboard.nixtla.io/). Copy it and paste it into the `.env.example` file. Rename the file to `.env`.
2. **Set up [uv](https://github.com/astral-sh/uv):**
```bash
pip install uv
uv venv --python 3.10
source .venv/bin/activate
uv pip sync requirements.txt
```
3. **Download data:**
```bash
make download_data
```
4. **Run the complete pipeline:**
```bash
python -m src.main
```
5. **Run the tests:**
We verify that the reproduced scores match the [official competition results](https://www.datasource.ai/en/home/data-science-competitions-for-startups/phase-2-vn1-forecasting-accuracy-challenge/leaderboard). Run the tests with:
```bash
pytest
```
6. **R results:**
For the R version of this study using `nixtlar`, run the `main.R` script. Make sure the `functions.R` script is in the same directory.
---
## **References**
- Vandeput, Nicolas. “VN1 Forecasting - Accuracy Challenge.” DataSource.ai, DataSource, 3 Oct. 2024, [https://www.datasource.ai/en/home/data-science-competitions-for-startups/phase-2-vn1-forecasting-accuracy-challenge/description](https://www.datasource.ai/en/home/data-science-competitions-for-startups/phase-2-vn1-forecasting-accuracy-challenge/description)
- Garza, Azul, et al. "TimeGPT-1." arXiv preprint arXiv:2310.03589. [https://arxiv.org/abs/2310.03589](https://arxiv.org/abs/2310.03589)
# Functions for VN1 Forecasting Competition ----
read_and_prepare_data <- function(dataset){
# Reads data in wide format and returns it in long format with columns `unique_id`, `ds`, and `y`
url <- get_dataset_url(dataset)
df_wide <- fread(url)
df_wide <- df_wide |>
mutate(unique_id = paste0(Client, "/", Warehouse, "/", Product)) |>
select(c(unique_id, everything())) |>
select(-c(Client, Warehouse, Product))
df <- pivot_longer(
data = df_wide,
cols = -unique_id,
names_to = "ds",
values_to = "y"
)
if(startsWith(dataset, "winners")){
names(df)[which(names(df) == "y")] <- dataset
}
return(df)
}
get_train_data <- function(df0, df1){
# Merges training data from phase 0 and phase 1 and removes leading zeros
df <- rbind(df0, df1) |>
arrange(unique_id, ds)
df_clean <- df |>
group_by(unique_id) |>
mutate(cumsum = cumsum(y)) |>
filter(cumsum > 0) |>
select(-cumsum) |>
ungroup()
return(df_clean)
}
vn1_competition_evaluation <- function(test, forecast, model){
# Computes competition evaluation
if(!is.character(forecast$ds)){
forecast$ds <- as.character(forecast$ds) # nixtlar returns timestamps for plotting
}
res <- merge(forecast, test, by=c("unique_id", "ds"))
res <- res |>
mutate(abs_err = abs(res[[model]]-res$y)) |>
mutate(err = res[[model]]-res$y)
  abs_err <- sum(res$abs_err, na.rm = TRUE)
  err <- sum(res$err, na.rm = TRUE)
  score <- (abs_err + abs(err)) / sum(res$y)
  score <- round(score, 4)
return(score)
}
get_dataset_url <- function(dataset){
# Returns the url of the given competition dataset
urls <- list(
phase0_sales = "https://www.datasource.ai/attachments/eyJpZCI6Ijk4NDYxNjE2NmZmZjM0MGRmNmE4MTczOGMyMzI2ZWI2LmNzdiIsInN0b3JhZ2UiOiJzdG9yZSIsIm1ldGFkYXRhIjp7ImZpbGVuYW1lIjoiUGhhc2UgMCAtIFNhbGVzLmNzdiIsInNpemUiOjEwODA0NjU0LCJtaW1lX3R5cGUiOiJ0ZXh0L2NzdiJ9fQ",
phase1_sales = "https://www.datasource.ai/attachments/eyJpZCI6ImM2OGQxNGNmNTJkZDQ1MTUyZTg0M2FkMDAyMjVlN2NlLmNzdiIsInN0b3JhZ2UiOiJzdG9yZSIsIm1ldGFkYXRhIjp7ImZpbGVuYW1lIjoiUGhhc2UgMSAtIFNhbGVzLmNzdiIsInNpemUiOjEwMTgzOTYsIm1pbWVfdHlwZSI6InRleHQvY3N2In19",
phase2_sales = "https://www.datasource.ai/attachments/eyJpZCI6IjhlNmJmNmU3ZTlhNWQ4NTcyNGVhNTI4YjAwNTk3OWE1LmNzdiIsInN0b3JhZ2UiOiJzdG9yZSIsIm1ldGFkYXRhIjp7ImZpbGVuYW1lIjoiUGhhc2UgMiAtIFNhbGVzLmNzdiIsInNpemUiOjEwMTI0MzcsIm1pbWVfdHlwZSI6InRleHQvY3N2In19",
winners1 = "https://www.datasource.ai/attachments/eyJpZCI6IjI1NDQxYmMyMTQ3MTA0MjJhMDcyYjllODcwZjEyNmY4LmNzdiIsInN0b3JhZ2UiOiJzdG9yZSIsIm1ldGFkYXRhIjp7ImZpbGVuYW1lIjoicGhhc2UgMiBzdWJtaXNzaW9uIGV4YW1pbmUgc21vb3RoZWQgMjAyNDEwMTcgRklOQUwuY3N2Iiwic2l6ZSI6MTk5MzAzNCwibWltZV90eXBlIjoidGV4dC9jc3YifX0",
winners2 = "https://www.datasource.ai/attachments/eyJpZCI6IjU3ODhjZTUwYTU3MTg3NjFlYzMzOWU0ZTg3MWUzNjQxLmNzdiIsInN0b3JhZ2UiOiJzdG9yZSIsIm1ldGFkYXRhIjp7ImZpbGVuYW1lIjoidm4xX3N1Ym1pc3Npb25fanVzdGluX2Z1cmxvdHRlLmNzdiIsInNpemUiOjM5MDkzNzksIm1pbWVfdHlwZSI6InRleHQvY3N2In19",
winners3 = "https://www.datasource.ai/attachments/eyJpZCI6ImE5NzcwNTZhMzhhMTc2ZWJjODFkMDMwMTM2Y2U2MTdlLmNzdiIsInN0b3JhZ2UiOiJzdG9yZSIsIm1ldGFkYXRhIjp7ImZpbGVuYW1lIjoiYXJzYW5pa3phZF9zdWIuY3N2Iiwic2l6ZSI6Mzg4OTcyNCwibWltZV90eXBlIjoidGV4dC9jc3YifX0",
winners4 = "https://www.datasource.ai/attachments/eyJpZCI6ImVlZmUxYWY2NDFjOWMwM2IxMzRhZTc2MzI1Nzg3NzIxLmNzdiIsInN0b3JhZ2UiOiJzdG9yZSIsIm1ldGFkYXRhIjp7ImZpbGVuYW1lIjoiVEZUX3R1bmVkX1YyX3NlZWRfNDIuY3N2Iiwic2l6ZSI6NjA3NDgzLCJtaW1lX3R5cGUiOiJ0ZXh0L2NzdiJ9fQ",
winners5 = "https://www.datasource.ai/attachments/eyJpZCI6IjMwMDEwMmY3NTNhMzlhN2YxNTk3ODYxZTI1N2Q2NzRmLmNzdiIsInN0b3JhZ2UiOiJzdG9yZSIsIm1ldGFkYXRhIjp7ImZpbGVuYW1lIjoiZGl2aW5lb3B0aW1pemVkd2VpZ2h0c2Vuc2VtYmxlLmNzdiIsInNpemUiOjE3OTU0NzgsIm1pbWVfdHlwZSI6InRleHQvY3N2In19"
)
return(urls[[dataset]])
}
# VN1 Forecasting Competition Solution with nixtlar ----
install.packages(c("nixtlar", "tidyverse", "data.table"))
library(nixtlar)
library(tidyverse)
library(data.table)
source("functions.R") # same directory as main.R
## Load Data ----
sales0 <- read_and_prepare_data("phase0_sales")
sales1 <- read_and_prepare_data("phase1_sales")
test_df <- read_and_prepare_data("phase2_sales")
## Prepare Training Dataset ----
train_df <- get_train_data(sales0, sales1)
## Generate TimeGPT Forecast ----
# nixtla_client_setup(api_key = "Your API key here")
# Learn how to set up your API key here: https://nixtla.github.io/nixtlar/articles/setting-up-your-api-key.html
fc <- nixtla_client_forecast(train_df, h=13, model="timegpt-1-long-horizon")
## Visualize TimeGPT Forecast ----
nixtla_client_plot(train_df, fc)
## Evaluate TimeGPT & Top 5 Competition Solutions ----
timegpt_score <- vn1_competition_evaluation(test_df, fc, "TimeGPT")
scores <- lapply(1:5, function(i){ # Top 5
winner_df <- read_and_prepare_data(paste0("winners", i))
vn1_competition_evaluation(test_df, winner_df, model = paste0("winners", i))
})
scores_df <- data.frame(
"Result" = c(paste0("Place #", 1:5), "TimeGPT"),
"Score" = c(as.numeric(scores), timegpt_score)
)
scores_df <- scores_df |> arrange(Score)
print(scores_df) # TimeGPT places 2nd!
from time import time
import numpy as np
import pandas as pd
from dotenv import load_dotenv
from nixtla import NixtlaClient
load_dotenv()
def read_and_prepare_data(file_path: str, value_name: str = "y") -> pd.DataFrame:
"""Reads data in wide format, and returns it in long format with columns `unique_id`, `ds`, `y`"""
df = pd.read_csv(file_path)
uid_cols = ["Client", "Warehouse", "Product"]
df["unique_id"] = df[uid_cols].astype(str).agg("-".join, axis=1)
df = df.drop(uid_cols, axis=1)
df = df.melt(id_vars=["unique_id"], var_name="ds", value_name=value_name)
df["ds"] = pd.to_datetime(df["ds"])
df = df.sort_values(by=["unique_id", "ds"])
return df
def get_train_data() -> pd.DataFrame:
"""Reads all train data and returns it in long format with columns `unique_id`, `ds`, `y`"""
train_list = [read_and_prepare_data(f"./data/phase_{i}_sales.csv") for i in [0, 1]]
train_df = pd.concat(train_list).reset_index(drop=True)
train_df = train_df.sort_values(by=["unique_id", "ds"])
def remove_leading_zeros(group):
first_non_zero_index = group["y"].ne(0).idxmax()
return group.loc[first_non_zero_index:]
train_df = (
train_df.groupby("unique_id").apply(remove_leading_zeros).reset_index(drop=True)
)
return train_df
def get_competition_forecasts() -> pd.DataFrame:
"""Reads all competition forecasts and returns it in long format with columns `unique_id`, `ds`, `y`"""
fcst_df: pd.DataFrame | None = None
for place in ["1st", "2nd", "3rd", "4th", "5th"]:
fcst_df_place = read_and_prepare_data(
f"./data/solution_{place}_place.csv", place
)
if fcst_df is None:
fcst_df = fcst_df_place
else:
fcst_df = fcst_df.merge(
fcst_df_place,
on=["unique_id", "ds"],
how="left",
)
return fcst_df
def vn1_competition_evaluation(forecasts: pd.DataFrame) -> pd.DataFrame:
"""Computes competition evaluation scores"""
actual = read_and_prepare_data("./data/phase_2_sales.csv")
res = actual[["unique_id", "ds", "y"]].merge(
forecasts, on=["unique_id", "ds"], how="left"
)
ids_forecasts = forecasts["unique_id"].unique()
ids_res = res["unique_id"].unique()
assert set(ids_forecasts) == set(ids_res), "Some unique_ids are missing"
scores = {}
for model in [col for col in forecasts.columns if col not in ["unique_id", "ds"]]:
abs_err = np.nansum(np.abs(res[model] - res["y"]))
err = np.nansum(res[model] - res["y"])
score = abs_err + abs(err)
score = score / res["y"].sum()
scores[model] = round(score, 4)
score_df = pd.DataFrame(list(scores.items()), columns=["model", "score"])
score_df = score_df.sort_values(by="score")
return score_df
def main():
"""Complete pipeline"""
train_df = get_train_data()
client = NixtlaClient()
init = time()
fcst_df = client.forecast(train_df, h=13, model="timegpt-1-long-horizon")
print(f"TimeGPT time: {time() - init}")
fcst_df_comp = get_competition_forecasts()
fcst_df = fcst_df.merge(fcst_df_comp, on=["unique_id", "ds"], how="left")
eval_df = vn1_competition_evaluation(fcst_df)
print(eval_df)
if __name__ == "__main__":
main()
import pandas as pd
from src.main import vn1_competition_evaluation, get_competition_forecasts
def test_vn1_competition_evaluation():
forecasts = get_competition_forecasts()
eval_df = vn1_competition_evaluation(forecasts)
assert len(eval_df) == 5
pd.testing.assert_series_equal(
eval_df["score"],
pd.Series([0.4637, 0.4657, 0.4758, 0.4774, 0.4808]),
check_names=False,
)
format:
  mintlify-md:
    code-fold: true
project:
  type: mintlify
,unique_id,ds,AutoARIMA
0,FOODS_1,2016-04-25,2567.6380220997385
1,FOODS_1,2016-04-26,2640.0126507600617
2,FOODS_1,2016-04-27,2654.020161270193
3,FOODS_1,2016-04-28,2796.440947432241
4,FOODS_1,2016-04-29,3208.3811781219893
5,FOODS_1,2016-04-30,3773.861663861278
6,FOODS_1,2016-05-01,3299.79743620078
7,FOODS_1,2016-05-02,2640.917833365884
8,FOODS_1,2016-05-03,2694.336636469822
9,FOODS_1,2016-05-04,2694.1035608286916
10,FOODS_1,2016-05-05,2835.3163066249176
11,FOODS_1,2016-05-06,3236.293019356992
12,FOODS_1,2016-05-07,3797.241666185409
13,FOODS_1,2016-05-08,3321.3663551719346
14,FOODS_1,2016-05-09,2657.5206296593224
15,FOODS_1,2016-05-10,2707.524732902354
16,FOODS_1,2016-05-11,2704.747898679939
17,FOODS_1,2016-05-12,2843.9075334840127
18,FOODS_1,2016-05-13,3243.2271452721875
19,FOODS_1,2016-05-14,3802.8383176295374
20,FOODS_1,2016-05-15,3325.8835081296597
21,FOODS_1,2016-05-16,2661.1665009534677
22,FOODS_1,2016-05-17,2710.4673778864567
23,FOODS_1,2016-05-18,2707.1229576998144
24,FOODS_1,2016-05-19,2845.8244841791743
25,FOODS_1,2016-05-20,3244.7743489150994
26,FOODS_1,2016-05-21,3804.0870921121996
27,FOODS_1,2016-05-22,3326.8914153406417
28,FOODS_2,2016-04-25,5247.139313835306
29,FOODS_2,2016-04-26,4792.960953564678
30,FOODS_2,2016-04-27,4590.964924105041
31,FOODS_2,2016-04-28,4600.860960731239
32,FOODS_2,2016-04-29,4942.967875785429
33,FOODS_2,2016-04-30,6337.5344008647035
34,FOODS_2,2016-05-01,6757.903656868389
35,FOODS_2,2016-05-02,5607.448278542931
36,FOODS_2,2016-05-03,5244.736379588703
37,FOODS_2,2016-05-04,5004.324139775963
38,FOODS_2,2016-05-05,4898.957821332703
39,FOODS_2,2016-05-06,5310.50858073175
40,FOODS_2,2016-05-07,6641.501751486881
41,FOODS_2,2016-05-08,6994.60266694966
42,FOODS_2,2016-05-09,5779.319357423172
43,FOODS_2,2016-05-10,5407.162676474501
44,FOODS_2,2016-05-11,5083.913842178118
45,FOODS_2,2016-05-12,4944.398003219373
46,FOODS_2,2016-05-13,5311.376375871703
47,FOODS_2,2016-05-14,6593.071399563353
48,FOODS_2,2016-05-15,6919.214380455565
49,FOODS_2,2016-05-16,5687.59113569716
50,FOODS_2,2016-05-17,5289.695188706264
51,FOODS_2,2016-05-18,4961.060422560739
52,FOODS_2,2016-05-19,4815.1162421391555
53,FOODS_2,2016-05-20,5184.318070407874
54,FOODS_2,2016-05-21,6467.664979048176
55,FOODS_2,2016-05-22,6807.52592255971
56,FOODS_3,2016-04-25,17666.731556718732
57,FOODS_3,2016-04-26,16540.541376421595
58,FOODS_3,2016-04-27,16160.633071055223
59,FOODS_3,2016-04-28,16370.683658640599
60,FOODS_3,2016-04-29,17889.869101221244
61,FOODS_3,2016-04-30,22332.436097780206
62,FOODS_3,2016-05-01,22713.044590532798
63,FOODS_3,2016-05-02,17468.426315986457
64,FOODS_3,2016-05-03,16293.254187261948
65,FOODS_3,2016-05-04,15975.967829497
66,FOODS_3,2016-05-05,16301.448695311468
67,FOODS_3,2016-05-06,17742.192012594733
68,FOODS_3,2016-05-07,22149.920466723906
69,FOODS_3,2016-05-08,22672.51910973226
70,FOODS_3,2016-05-09,17388.755964833897
71,FOODS_3,2016-05-10,16229.554742510712
72,FOODS_3,2016-05-11,15913.853788834986
73,FOODS_3,2016-05-12,16229.441847880662
74,FOODS_3,2016-05-13,17695.119173018684
75,FOODS_3,2016-05-14,22118.38914081269
76,FOODS_3,2016-05-15,22622.383902927766
77,FOODS_3,2016-05-16,17352.915739873653
78,FOODS_3,2016-05-17,16196.863301084244
79,FOODS_3,2016-05-18,15886.258449760904
80,FOODS_3,2016-05-19,16208.436163712715
81,FOODS_3,2016-05-20,17673.61997937043
82,FOODS_3,2016-05-21,22097.648256558292
83,FOODS_3,2016-05-22,22608.340998500338
84,HOBBIES_1,2016-04-25,3301.1184614493914
85,HOBBIES_1,2016-04-26,3190.0305707575
86,HOBBIES_1,2016-04-27,3199.935172884026
87,HOBBIES_1,2016-04-28,3225.695752897986
88,HOBBIES_1,2016-04-29,3567.265955094044
89,HOBBIES_1,2016-04-30,4356.224282294094
90,HOBBIES_1,2016-05-01,4089.925888720474
91,HOBBIES_1,2016-05-02,3256.234765934616
92,HOBBIES_1,2016-05-03,3167.254279226249
93,HOBBIES_1,2016-05-04,3182.164242104323
94,HOBBIES_1,2016-05-05,3197.8208347975133
95,HOBBIES_1,2016-05-06,3562.1759714192
96,HOBBIES_1,2016-05-07,4348.942474784928
97,HOBBIES_1,2016-05-08,4062.088166835995
98,HOBBIES_1,2016-05-09,3270.411197190324
99,HOBBIES_1,2016-05-10,3153.9826259751076
100,HOBBIES_1,2016-05-11,3185.763508441231
101,HOBBIES_1,2016-05-12,3215.5124943559786
102,HOBBIES_1,2016-05-13,3557.0053751276196
103,HOBBIES_1,2016-05-14,4347.883194155448
104,HOBBIES_1,2016-05-15,4072.703812696276
105,HOBBIES_1,2016-05-16,3257.5290353634405
106,HOBBIES_1,2016-05-17,3160.8541642155105
107,HOBBIES_1,2016-05-18,3181.513304571057
108,HOBBIES_1,2016-05-19,3202.9504121022114
109,HOBBIES_1,2016-05-20,3559.2179367013387
110,HOBBIES_1,2016-05-21,4348.099858357742
111,HOBBIES_1,2016-05-22,4064.243078853568
112,HOBBIES_2,2016-04-25,372.73314373276685
113,HOBBIES_2,2016-04-26,394.9539049862272
114,HOBBIES_2,2016-04-27,383.16517087090995
115,HOBBIES_2,2016-04-28,383.62499302365416
116,HOBBIES_2,2016-04-29,398.311355639119
117,HOBBIES_2,2016-04-30,394.347605737723
118,HOBBIES_2,2016-05-01,410.1164983512131
119,HOBBIES_2,2016-05-02,381.5187849900045
120,HOBBIES_2,2016-05-03,390.5415783971641
121,HOBBIES_2,2016-05-04,384.30591664608176
122,HOBBIES_2,2016-05-05,382.0656727697983
123,HOBBIES_2,2016-05-06,391.0787416473706
124,HOBBIES_2,2016-05-07,388.7836691251789
125,HOBBIES_2,2016-05-08,393.876028907937
126,HOBBIES_2,2016-05-09,390.2092112439395
127,HOBBIES_2,2016-05-10,389.2354081364607
128,HOBBIES_2,2016-05-11,388.6840382471152
129,HOBBIES_2,2016-05-12,388.2380749588003
130,HOBBIES_2,2016-05-13,387.8486951350242
131,HOBBIES_2,2016-05-14,387.50441758710645
132,HOBBIES_2,2016-05-15,387.19942014913687
133,HOBBIES_2,2016-05-16,386.9291391176742
134,HOBBIES_2,2016-05-17,386.6896116476871
135,HOBBIES_2,2016-05-18,386.4773369473272
136,HOBBIES_2,2016-05-19,386.28921406142393
137,HOBBIES_2,2016-05-20,386.12249505879066
138,HOBBIES_2,2016-05-21,385.9747446856811
139,HOBBIES_2,2016-05-22,385.84380476581924
140,HOUSEHOLD_1,2016-04-25,7401.891643264893
141,HOUSEHOLD_1,2016-04-26,6713.059783961793
142,HOUSEHOLD_1,2016-04-27,6568.574826414228
143,HOUSEHOLD_1,2016-04-28,6780.766120981868
144,HOUSEHOLD_1,2016-04-29,7572.471090810987
145,HOUSEHOLD_1,2016-04-30,9760.255572651784
146,HOUSEHOLD_1,2016-05-01,9626.516172187357
147,HOUSEHOLD_1,2016-05-02,7339.549091656914
148,HOUSEHOLD_1,2016-05-03,6715.501042329869
149,HOUSEHOLD_1,2016-05-04,6588.688826450386
150,HOUSEHOLD_1,2016-05-05,6792.018975120889
151,HOUSEHOLD_1,2016-05-06,7585.826196179538
152,HOUSEHOLD_1,2016-05-07,9784.576440586516
153,HOUSEHOLD_1,2016-05-08,9625.229656671552
154,HOUSEHOLD_1,2016-05-09,7370.017398852801
155,HOUSEHOLD_1,2016-05-10,6703.274498285182
156,HOUSEHOLD_1,2016-05-11,6599.658566441017
157,HOUSEHOLD_1,2016-05-12,6803.892952779315
158,HOUSEHOLD_1,2016-05-13,7578.147413631544
159,HOUSEHOLD_1,2016-05-14,9797.63840894531
160,HOUSEHOLD_1,2016-05-15,9618.004359773748
161,HOUSEHOLD_1,2016-05-16,7369.325267410535
162,HOUSEHOLD_1,2016-05-17,6704.177617699504
163,HOUSEHOLD_1,2016-05-18,6602.321156728928
164,HOUSEHOLD_1,2016-05-19,6806.125635799577
165,HOUSEHOLD_1,2016-05-20,7579.715540310798
166,HOUSEHOLD_1,2016-05-21,9800.535564906138
167,HOUSEHOLD_1,2016-05-22,9618.816928719863
168,HOUSEHOLD_2,2016-04-25,1944.6365790975688
169,HOUSEHOLD_2,2016-04-26,1782.1136284351692
170,HOUSEHOLD_2,2016-04-27,1783.3160335243492
171,HOUSEHOLD_2,2016-04-28,1843.7479525740052
172,HOUSEHOLD_2,2016-04-29,2017.7306193648328
173,HOUSEHOLD_2,2016-04-30,2573.4319970333827
174,HOUSEHOLD_2,2016-05-01,2555.781560345023
175,HOUSEHOLD_2,2016-05-02,1911.5887343314832
176,HOUSEHOLD_2,2016-05-03,1773.6009268551322
177,HOUSEHOLD_2,2016-05-04,1783.737933427243
178,HOUSEHOLD_2,2016-05-05,1836.4433591054317
179,HOUSEHOLD_2,2016-05-06,2026.2345730343163
180,HOUSEHOLD_2,2016-05-07,2599.273470339867
181,HOUSEHOLD_2,2016-05-08,2547.324959977864
182,HOUSEHOLD_2,2016-05-09,1919.6860442557204
183,HOUSEHOLD_2,2016-05-10,1772.1267493740079
184,HOUSEHOLD_2,2016-05-11,1776.2903338158771
185,HOUSEHOLD_2,2016-05-12,1836.0676051839982
186,HOUSEHOLD_2,2016-05-13,2012.270429155826
187,HOUSEHOLD_2,2016-05-14,2570.3606621455087
188,HOUSEHOLD_2,2016-05-15,2548.3296441260773
189,HOUSEHOLD_2,2016-05-16,1906.4143149755678
190,HOUSEHOLD_2,2016-05-17,1767.280129446266
191,HOUSEHOLD_2,2016-05-18,1776.7401745381599
192,HOUSEHOLD_2,2016-05-19,1830.4754939798
193,HOUSEHOLD_2,2016-05-20,2018.588737506113
194,HOUSEHOLD_2,2016-05-21,2589.769743131061
195,HOUSEHOLD_2,2016-05-22,2541.838195247957
,unique_id,ds,AutoLightGBM
0,FOODS_1,2016-04-25,2694.6738511439944
1,FOODS_1,2016-04-26,2805.230420439614
2,FOODS_1,2016-04-27,2662.0380598102083
3,FOODS_1,2016-04-28,2645.027003376193
4,FOODS_1,2016-04-29,3236.588718297946
5,FOODS_1,2016-04-30,3747.51371444864
6,FOODS_1,2016-05-01,3172.307149809725
7,FOODS_1,2016-05-02,2657.3513449500338
8,FOODS_1,2016-05-03,2691.9787279391626
9,FOODS_1,2016-05-04,2746.2863197867673
10,FOODS_1,2016-05-05,2762.255340569623
11,FOODS_1,2016-05-06,3257.194144588295
12,FOODS_1,2016-05-07,3799.031472854631
13,FOODS_1,2016-05-08,3252.8382373569984
14,FOODS_1,2016-05-09,2786.334069555473
15,FOODS_1,2016-05-10,2820.205574835855
16,FOODS_1,2016-05-11,2762.5056712532564
17,FOODS_1,2016-05-12,2786.88595237666
18,FOODS_1,2016-05-13,3309.4905882795897
19,FOODS_1,2016-05-14,3796.0201508972023
20,FOODS_1,2016-05-15,3273.3752810082988
21,FOODS_1,2016-05-16,2742.19182422212
22,FOODS_1,2016-05-17,2823.468902301943
23,FOODS_1,2016-05-18,2854.8557474617855
24,FOODS_1,2016-05-19,2871.135690726362
25,FOODS_1,2016-05-20,3331.1931287348802
26,FOODS_1,2016-05-21,3781.1473608863216
27,FOODS_1,2016-05-22,3405.544149448268
28,FOODS_2,2016-04-25,5120.102523686289
29,FOODS_2,2016-04-26,5185.27038248254
30,FOODS_2,2016-04-27,5115.111348340718
31,FOODS_2,2016-04-28,4619.394726238464
32,FOODS_2,2016-04-29,5432.537671232106
33,FOODS_2,2016-04-30,6025.41273018486
34,FOODS_2,2016-05-01,6220.986523247984
35,FOODS_2,2016-05-02,4964.5749171463885
36,FOODS_2,2016-05-03,5103.670724855448
37,FOODS_2,2016-05-04,5144.3979512523165
38,FOODS_2,2016-05-05,5218.996763184711
39,FOODS_2,2016-05-06,5439.347347215497
40,FOODS_2,2016-05-07,5865.1687497733365
41,FOODS_2,2016-05-08,6368.92258946046
42,FOODS_2,2016-05-09,5186.848082254273
43,FOODS_2,2016-05-10,4880.374441261076
44,FOODS_2,2016-05-11,4909.098152984957
45,FOODS_2,2016-05-12,5081.108905626409
46,FOODS_2,2016-05-13,5407.15017000438
47,FOODS_2,2016-05-14,5830.236827339688
48,FOODS_2,2016-05-15,6618.276011160659
49,FOODS_2,2016-05-16,5242.077035682606
50,FOODS_2,2016-05-17,5003.983891141501
51,FOODS_2,2016-05-18,4958.840167821236
52,FOODS_2,2016-05-19,4762.949855746844
53,FOODS_2,2016-05-20,5389.750344472496
54,FOODS_2,2016-05-21,5942.651453698519
55,FOODS_2,2016-05-22,6269.243619511553
56,FOODS_3,2016-04-25,17711.385739545563
57,FOODS_3,2016-04-26,16357.865618151673
58,FOODS_3,2016-04-27,16557.86563355973
59,FOODS_3,2016-04-28,16163.752858976526
60,FOODS_3,2016-04-29,18881.115214451296
61,FOODS_3,2016-04-30,23092.123636884276
62,FOODS_3,2016-05-01,23294.58207237022
63,FOODS_3,2016-05-02,17732.312728761386
64,FOODS_3,2016-05-03,16215.758103512571
65,FOODS_3,2016-05-04,16609.186712046285
66,FOODS_3,2016-05-05,17115.71866090232
67,FOODS_3,2016-05-06,18163.574602023327
68,FOODS_3,2016-05-07,22698.015289239014
69,FOODS_3,2016-05-08,22955.84590815469
70,FOODS_3,2016-05-09,17924.510549950868
71,FOODS_3,2016-05-10,16652.368043668524
72,FOODS_3,2016-05-11,16706.713769910348
73,FOODS_3,2016-05-12,17070.997700996042
74,FOODS_3,2016-05-13,18438.390974654023
75,FOODS_3,2016-05-14,22698.015289239014
76,FOODS_3,2016-05-15,22955.84590815469
77,FOODS_3,2016-05-16,18333.211272508892
78,FOODS_3,2016-05-17,16812.741370928095
79,FOODS_3,2016-05-18,16706.713769910348
80,FOODS_3,2016-05-19,16479.679299243857
81,FOODS_3,2016-05-20,18502.532285923797
82,FOODS_3,2016-05-21,22698.015289239014
83,FOODS_3,2016-05-22,22955.84590815469
84,HOBBIES_1,2016-04-25,3226.027840439743
85,HOBBIES_1,2016-04-26,3195.3648189304345
86,HOBBIES_1,2016-04-27,3172.479599244583
87,HOBBIES_1,2016-04-28,3215.297092249357
88,HOBBIES_1,2016-04-29,3550.478555688749
89,HOBBIES_1,2016-04-30,4315.818469198181
90,HOBBIES_1,2016-05-01,4321.600390227421
91,HOBBIES_1,2016-05-02,3157.8276654934016
92,HOBBIES_1,2016-05-03,3208.8238007824716
93,HOBBIES_1,2016-05-04,3171.402461945512
94,HOBBIES_1,2016-05-05,3122.93302254743
95,HOBBIES_1,2016-05-06,3534.892640584684
96,HOBBIES_1,2016-05-07,4261.123354991392
97,HOBBIES_1,2016-05-08,4296.069679132532
98,HOBBIES_1,2016-05-09,3273.3972145241705
99,HOBBIES_1,2016-05-10,3221.5792680507666
100,HOBBIES_1,2016-05-11,3200.345723427872
101,HOBBIES_1,2016-05-12,3245.4991334325614
102,HOBBIES_1,2016-05-13,3568.3027303655
103,HOBBIES_1,2016-05-14,4320.567307842713
104,HOBBIES_1,2016-05-15,4343.116200412291
105,HOBBIES_1,2016-05-16,3289.762766694878
106,HOBBIES_1,2016-05-17,3267.984449690506
107,HOBBIES_1,2016-05-18,3200.345723427872
108,HOBBIES_1,2016-05-19,3208.1360951146366
109,HOBBIES_1,2016-05-20,3615.44813501723
110,HOBBIES_1,2016-05-21,4326.047999208027
111,HOBBIES_1,2016-05-22,4344.900175442336
112,HOBBIES_2,2016-04-25,339.77407251192693
113,HOBBIES_2,2016-04-26,397.4010613540528
114,HOBBIES_2,2016-04-27,382.61025056378617
115,HOBBIES_2,2016-04-28,393.55441552694975
116,HOBBIES_2,2016-04-29,412.81304016665115
117,HOBBIES_2,2016-04-30,416.1985117603962
118,HOBBIES_2,2016-05-01,467.8505902120062
119,HOBBIES_2,2016-05-02,359.72499730411994
120,HOBBIES_2,2016-05-03,400.9547760594411
121,HOBBIES_2,2016-05-04,384.0621884160096
122,HOBBIES_2,2016-05-05,384.0621884160096
123,HOBBIES_2,2016-05-06,412.81304016665115
124,HOBBIES_2,2016-05-07,405.2543467972324
125,HOBBIES_2,2016-05-08,467.8505902120062
126,HOBBIES_2,2016-05-09,380.6591071937099
127,HOBBIES_2,2016-05-10,400.9547760594411
128,HOBBIES_2,2016-05-11,400.9547760594411
129,HOBBIES_2,2016-05-12,400.9547760594411
130,HOBBIES_2,2016-05-13,412.81304016665115
131,HOBBIES_2,2016-05-14,416.1985117603962
132,HOBBIES_2,2016-05-15,467.8505902120062
133,HOBBIES_2,2016-05-16,390.6243107525114
134,HOBBIES_2,2016-05-17,411.89894102260484
135,HOBBIES_2,2016-05-18,400.9547760594411
136,HOBBIES_2,2016-05-19,400.9547760594411
137,HOBBIES_2,2016-05-20,412.81304016665115
138,HOBBIES_2,2016-05-21,416.1985117603962
139,HOBBIES_2,2016-05-22,467.8505902120062
140,HOUSEHOLD_1,2016-04-25,7160.832175743806
141,HOUSEHOLD_1,2016-04-26,6971.744965808053
142,HOUSEHOLD_1,2016-04-27,6676.608231213473
143,HOUSEHOLD_1,2016-04-28,6640.7455196004885
144,HOUSEHOLD_1,2016-04-29,7565.152447398786
145,HOUSEHOLD_1,2016-04-30,9465.935601433835
146,HOUSEHOLD_1,2016-05-01,9702.66861697589
147,HOUSEHOLD_1,2016-05-02,7205.825513464553
148,HOUSEHOLD_1,2016-05-03,7113.679030081171
149,HOUSEHOLD_1,2016-05-04,6649.31399566832
150,HOUSEHOLD_1,2016-05-05,6674.348708680196
151,HOUSEHOLD_1,2016-05-06,7697.496157413757
152,HOUSEHOLD_1,2016-05-07,9399.00971175823
153,HOUSEHOLD_1,2016-05-08,9439.07736476184
154,HOUSEHOLD_1,2016-05-09,7493.79860897777
155,HOUSEHOLD_1,2016-05-10,7002.1844984992185
156,HOUSEHOLD_1,2016-05-11,6661.775405589411
157,HOUSEHOLD_1,2016-05-12,6764.480000367033
158,HOUSEHOLD_1,2016-05-13,7598.947495707824
159,HOUSEHOLD_1,2016-05-14,9399.00971175823
160,HOUSEHOLD_1,2016-05-15,9439.07736476184
161,HOUSEHOLD_1,2016-05-16,7534.459703980786
162,HOUSEHOLD_1,2016-05-17,7229.353310975897
163,HOUSEHOLD_1,2016-05-18,6699.882771137218
164,HOUSEHOLD_1,2016-05-19,6764.480000367033
165,HOUSEHOLD_1,2016-05-20,7697.496157413757
166,HOUSEHOLD_1,2016-05-21,9399.00971175823
167,HOUSEHOLD_1,2016-05-22,9314.773035693921
168,HOUSEHOLD_2,2016-04-25,1891.769292418806
169,HOUSEHOLD_2,2016-04-26,1805.813605315031
170,HOUSEHOLD_2,2016-04-27,1758.0613497523343
171,HOUSEHOLD_2,2016-04-28,1800.1115326528136
172,HOUSEHOLD_2,2016-04-29,1959.4740469519804
173,HOUSEHOLD_2,2016-04-30,2660.449142902388
174,HOUSEHOLD_2,2016-05-01,2619.654508404448
175,HOUSEHOLD_2,2016-05-02,1839.0080089551213
176,HOUSEHOLD_2,2016-05-03,1829.4970368277027
177,HOUSEHOLD_2,2016-05-04,1823.5007626046254
178,HOUSEHOLD_2,2016-05-05,1811.520891959066
179,HOUSEHOLD_2,2016-05-06,1977.7038269928182
180,HOUSEHOLD_2,2016-05-07,2816.242000429492
181,HOUSEHOLD_2,2016-05-08,2634.914005288675
182,HOUSEHOLD_2,2016-05-09,1878.4366945999661
183,HOUSEHOLD_2,2016-05-10,1815.8433855980848
184,HOUSEHOLD_2,2016-05-11,1811.1689759986775
185,HOUSEHOLD_2,2016-05-12,1811.520891959066
186,HOUSEHOLD_2,2016-05-13,1977.7038269928182
187,HOUSEHOLD_2,2016-05-14,2753.4834680201943
188,HOUSEHOLD_2,2016-05-15,2586.938159190155
189,HOUSEHOLD_2,2016-05-16,1857.353491565642
190,HOUSEHOLD_2,2016-05-17,1829.4970368277027
191,HOUSEHOLD_2,2016-05-18,1811.520891959066
192,HOUSEHOLD_2,2016-05-19,1811.520891959066
193,HOUSEHOLD_2,2016-05-20,1977.7038269928182
194,HOUSEHOLD_2,2016-05-21,2861.6326484548927
195,HOUSEHOLD_2,2016-05-22,2645.9665234893946