#include <stdio.h>
#include <stdlib.h>
#include <string.h>
+#include <sys/time.h>
+#include <time.h>
#include <json.h>
#include "fcache.h"
#include "http.h"
#include "list.h"
-#include "log.h"
#include "lp_ws.h"
#include "lp_json.h"
+#include <plog.h>
#include "ppastats.h"
+#include <ptime.h>
/** Default ws.size value for the getPublishedBinaries request. */
static const int DEFAULT_WS_SIZE = 150;
return key;
}
+/*
+ * Build the fcache key for the daily download totals of a binary:
+ * the binary URL with its scheme prefix stripped, followed by "/ddts".
+ *
+ * NOTE(review): url + 7 assumes a 7-character scheme prefix
+ * (presumably "http://") -- confirm against the callers.  The
+ * malloc() result is used unchecked, matching the surrounding
+ * cache-key helpers.
+ *
+ * Caller owns (and must free) the returned string.
+ */
+static char *get_ddts_list_cache_key(const char *url)
+{
+ char *key;
+
+ key = malloc(strlen(url + 7) + strlen("/ddts") + 1);
+ sprintf(key, "%s/ddts", url + 7);
+
+ return key;
+}
+
static struct bpph **get_bpph_list_from_cache(const char *key)
{
char *content;
}
if (last)
- return time_to_str(last);
+ return time_to_ISO8601_time(&last);
else
return NULL;
}
date = get_last_creation_date(result);
if (date) {
- printf("Update package since: %s\n", date);
-
tmp = malloc(strlen(url)
+ strlen("&created_since_date=")
+ strlen(date)+1);
return distro;
}
-struct daily_download_total **get_daily_download_totals(const char *binary_url)
+/*
+ * Convert ddts older than 4 weeks to the same JSON representation as
+ * the LP one.  Newer ddts are not stored in the cache because their
+ * data may still change during the following days; skipping them
+ * avoids missing downloads which LP has not yet taken into account.
+ *
+ * Caller owns the returned json_object (release with json_object_put()).
+ */
+static json_object *ddts_to_json_for_cache(struct daily_download_total **ddts)
 {
-	char *url;
-	json_object *obj;
-	struct daily_download_total **result = NULL;
+ json_object *j_ddts;
+ struct daily_download_total *ddt;
+ char *date;
+ struct timeval *tv;
+ time_t t;
+ double d;
-	url = malloc(strlen(binary_url)+
-		     strlen(QUERY_GET_DAILY_DOWNLOAD_TOTALS)+1);
+ j_ddts = json_object_new_object();
-	strcpy(url, binary_url);
-	strcat(url, QUERY_GET_DAILY_DOWNLOAD_TOTALS);
+ /* Current time, used below to compute the age of each ddt.
+    NOTE(review): a stack-allocated struct timeval would avoid this
+    malloc; its result is used unchecked. */
+ tv = malloc(sizeof(struct timeval));
+ gettimeofday(tv, NULL);
-	obj = get_json_object(url);
+ while (ddts && *ddts) {
+ ddt = *ddts;
-	if (obj) {
-		result = json_object_to_daily_download_totals(obj);
-		json_object_put(obj);
+ /* NOTE(review): mktime() interprets ddt->date as local time --
+    confirm the stored dates use the same convention. */
+ t = mktime(&(ddt->date));
+
+ d = difftime(tv->tv_sec, t);
+
+ if (d > 4 * 7 * 24 * 60 * 60) { /* older than 4 weeks */
+ /* key is the ISO 8601 date, value the download count */
+ date = tm_to_ISO8601_date(&ddt->date);
+ json_object_object_add(j_ddts,
+ date,
+ json_object_new_int(ddt->count));
+ free(date);
+ }
+
+ ddts++;
 }
+ free(tv);
+
+ return j_ddts;
+}
+
+/*
+ * Build the LP query URL for the daily download totals of a binary.
+ * When 'st' is non-zero, a start_date parameter is appended so that
+ * only totals from that date on are requested.
+ *
+ * NOTE(review): the buffer is sized with the literal
+ * "&start_date=YYYY-MM-DD", which assumes time_to_ISO8601_date()
+ * returns at most 10 characters -- confirm, otherwise the strcat()
+ * overflows.  malloc() results are used unchecked, as elsewhere in
+ * this file.
+ *
+ * Caller owns (and must free) the returned string.
+ */
+char *create_ddts_query(const char *binary_url, time_t st)
+{
+ char *q;
+ char *sdate;
+
+ if (st) {
+ sdate = time_to_ISO8601_date(&st);
+
+ q = malloc(strlen(binary_url)
+    + strlen(QUERY_GET_DAILY_DOWNLOAD_TOTALS)
+    + strlen("&start_date=YYYY-MM-DD")
+    + 1);
+ strcpy(q, binary_url);
+ strcat(q, QUERY_GET_DAILY_DOWNLOAD_TOTALS);
+ strcat(q, "&start_date=");
+ strcat(q, sdate);
+
+ free(sdate);
+ } else {
+ q = malloc(strlen(binary_url)
+    + strlen(QUERY_GET_DAILY_DOWNLOAD_TOTALS)
+    + 1);
+ strcpy(q, binary_url);
+ strcat(q, QUERY_GET_DAILY_DOWNLOAD_TOTALS);
+ }
+
+ return q;
+}
+
+/*
+ * Retrieve the daily download totals of a binary package.
+ *
+ * Totals already present in the file cache are loaded first; LP is
+ * then queried only for the dates following the last cached one, and
+ * the two lists are merged.  The merged list, minus the most recent
+ * 4 weeks (see ddts_to_json_for_cache()), is written back to the
+ * cache.  Returns NULL when the LP request fails.
+ *
+ * NOTE(review): 'content' returned by fcache_get() is never freed,
+ * and the json_object parsed from the cache is overwritten by the
+ * get_json_object() result without a json_object_put() -- confirm
+ * ownership, both look like leaks.  Only the cached_ddts and
+ * retrieved_ddts arrays are freed, not their elements; presumably
+ * ddts_merge() reuses the element structs in the returned list --
+ * verify.
+ */
+struct daily_download_total **get_daily_download_totals(const char *binary_url)
+{
+ char *url, *key, *content;
+ json_object *j_ddts, *json;
+ struct daily_download_total **retrieved_ddts = NULL;
+ struct daily_download_total **cached_ddts;
+ struct daily_download_total **ddts;
+ time_t last_t;
+
+ key = get_ddts_list_cache_key(binary_url);
+
+ content = fcache_get(key);
+ if (content)
+ json = json_tokener_parse(content);
+ else
+ json = NULL;
+
+ if (json) {
+ cached_ddts = json_object_to_daily_download_totals(json);
+ last_t = ddts_get_last_date(cached_ddts);
+ } else {
+ last_t = 0;
+ cached_ddts = NULL;
+ }
+
+ /* query only the dates newer than the cached ones */
+ url = create_ddts_query(binary_url, last_t);
+
+ json = get_json_object(url);
+
 free(url);
-	return result;
+ if (json) {
+ retrieved_ddts = json_object_to_daily_download_totals(json);
+
+ ddts = ddts_merge(cached_ddts, retrieved_ddts);
+
+ json_object_put(json);
+ /* refresh the cache with the merged list */
+ j_ddts = ddts_to_json_for_cache(ddts);
+ fcache_put(key, json_object_get_string(j_ddts));
+ json_object_put(j_ddts);
+ } else {
+ ddts = NULL;
+ }
+
+ free(key);
+ free(cached_ddts);
+ free(retrieved_ddts);
+
+ return ddts;
 }