added --get-bpph-size to specify the size of the requests to get the list of binary...
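
A hedged sketch, not part of this commit: only the get_bpph_list() signature is
taken from the diff below; the option table, the header name and the archive URL
are hypothetical. It shows one way a --get-bpph-size value could be parsed and
forwarded:

    #include <getopt.h>
    #include <stdlib.h>

    #include "lp_ws.h"   /* assumed to declare struct bpph and get_bpph_list() */

    int main(int argc, char **argv)
    {
            int ws_size = 0;   /* out of range, so lp_ws.c falls back to its default */
            int c, i;
            static struct option opts[] = {
                    {"get-bpph-size", required_argument, NULL, 's'},
                    {NULL, 0, NULL, 0}
            };

            while ((c = getopt_long(argc, argv, "s:", opts, &i)) != -1)
                    if (c == 's')
                            ws_size = atoi(optarg);

            /* archive URL and status are placeholders for illustration */
            struct bpph **list = get_bpph_list("https://api.launchpad.net/1.0/~owner/+archive/ppa",
                                               "Published",
                                               ws_size);

            return list ? EXIT_SUCCESS : EXIT_FAILURE;
    }
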
diff --git a/src/lp_ws.c b/src/lp_ws.c
index 63a8d36..3086e6f 100644
--- a/src/lp_ws.c
+++ b/src/lp_ws.c
@@ -20,6 +20,7 @@
 #include <libintl.h>
 #define _(String) gettext(String)
 
+#include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
 
@@ -34,8 +35,9 @@
 #include "lp_json.h"
 #include "ppastats.h"
 
-static const char *
-QUERY_GET_PUBLISHED_BINARIES = "?ws.op=getPublishedBinaries&ws.size=300";
+/** Default ws.size value for the getPublishedBinaries request. */
+static const int DEFAULT_WS_SIZE = 150;
+
 static const char *QUERY_GET_DOWNLOAD_COUNT = "?ws.op=getDownloadCount";
 static const char *
 QUERY_GET_DAILY_DOWNLOAD_TOTALS = "?ws.op=getDailyDownloadTotals";
@@ -58,41 +60,139 @@ static json_object *get_json_object(const char *url)
        return NULL;
 }
 
-#define json_object_to_bpph_list \
-json_object_to_binary_package_publishing_history_list
+static char *get_bpph_list_cache_key(const char *archive_url)
+{
+       char *key;
+
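+       /* Cache key: the archive URL with its 7-character scheme prefix
+        * stripped and "/bpph" appended. */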
+       key = malloc(strlen(archive_url + 7) + strlen("/bpph") + 1);
+       sprintf(key, "%s/bpph", archive_url + 7);
 
-struct binary_package_publishing_history * *
-get_binary_package_publishing_history_list(const char *archive_url,
-                                          const char *pkg_status)
+       return key;
+}
+
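+/* Rebuild the bpph list from the JSON document stored in the file cache.
+ * Returns NULL on a cache miss or parse failure. */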
+static struct bpph **get_bpph_list_from_cache(const char *key)
 {
-       struct json_object *o_next;
+       char *content;
+       struct bpph **list;
+       json_object *json;
+
+       content = fcache_get(key);
+       if (!content)
+               return NULL;
+
+       json = json_tokener_parse(content);
+       if (!json) {
+               free(content);
+               return NULL;
+       }
+
+       list = json_object_to_bpph_list(json);
+
+       json_object_put(json);
+       free(content);
+
+       return list;
+}
+
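+/* Return the most recent 'date_created' found in 'list' as a string, or
+ * NULL if the list is empty. Used below as the created_since_date filter. */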
+static char *get_last_creation_date(struct bpph **list)
+{
+       time_t last, t;
+       struct bpph **cur;
+
+       last = 0;
+
+       if (list)
+               for (cur = list; *cur; cur++) {
+                       t = (*cur)->date_created;
+                       if (t > last)
+                               last = t;
+               }
+
+       if (last)
+               return time_to_str(last);
+       else
+               return NULL;
+}
+
+/*
+ * 'archive_url': LP URL of the archive.
+ * 'size': number of entries per reply page; values outside the range 1-300
+ *         fall back to DEFAULT_WS_SIZE.
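+ *
+ * Example of the resulting query (default size, status "Published"):
+ *   <archive_url>?ws.op=getPublishedBinaries&ws.size=150&status=Published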
+ */
+static char *create_query_get_bpph(const char *archive_url,
+                                  const char *status,
+                                  int size)
+{
+       static const char *default_opt = "?ws.op=getPublishedBinaries&ws.size=";
+       static const char *status_opt = "&status=";
        char *url;
-       json_object *o;
-       void **result = NULL;
+       size_t n;
 
-       url = malloc(strlen(archive_url)+
-                    strlen(QUERY_GET_PUBLISHED_BINARIES)+
-                    (pkg_status ? strlen("&status=")+strlen(pkg_status) : 0)+
-                    1);
+       if (size < 1 || size > 300)
+               size = DEFAULT_WS_SIZE;
 
-       strcpy(url, archive_url);
-       strcat(url, QUERY_GET_PUBLISHED_BINARIES);
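+       /* 3 bytes for the ws.size digits (at most "300") plus the '\0'. */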
+       n = strlen(archive_url) + strlen(default_opt) + 3 + 1;
+
+       if (status)
+               n += strlen(status_opt) + strlen(status);
+
+       url = malloc(n);
+       sprintf(url, "%s%s%d", archive_url, default_opt, size);
+
+       if (status) {
+               strcat(url, status_opt);
+               strcat(url, status);
+       }
+
+       return url;
+}
+
+struct bpph **get_bpph_list(const char *archive_url,
+                           const char *pkg_status,
+                           int ws_size)
+{
+       char *url, *key, *tmp;
+       struct bpph **result;
+       struct json_object *o, *bpph_json, *o_next;
+       char *date;
+       int ok;
 
-       if (pkg_status) {
-               strcat(url, "&status=");
-               strcat(url, pkg_status);
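+       /*
+        * Load the cached list first, ask Launchpad only for entries created
+        * since the newest cached record, follow the next_collection_link
+        * pages, then refresh the cache.
+        */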
+       url = create_query_get_bpph(archive_url, pkg_status, ws_size);
+
+       key = get_bpph_list_cache_key(archive_url);
+
+       result = get_bpph_list_from_cache(key);
+
+       if (result) {
+               date = get_last_creation_date(result);
+
+               if (date) {
+                       printf("Fetching packages created since %s\n", date);
+
+                       tmp = malloc(strlen(url)
+                                    + strlen("&created_since_date=")
+                                    + strlen(date)+1);
+                       strcpy(tmp, url);
+                       strcat(tmp, "&created_since_date=");
+                       strcat(tmp, date);
+
+                       free(url);
+                       url = tmp;
+
+                       free(date);
+               }
        }
 
+       ok = 1;
        while (url) {
                o = get_json_object(url);
                free(url);
                url = NULL;
 
-               if (!o)
+               if (!o) {
+                       ok = 0;
                        break;
+               }
 
-               result = list_append_list(result,
-                                         (void **)json_object_to_bpph_list(o));
+               result = bpph_list_append_list(result,
+                                              json_object_to_bpph_list(o));
 
                o_next = json_object_object_get(o, "next_collection_link");
 
@@ -102,7 +202,15 @@ get_binary_package_publishing_history_list(const char *archive_url,
                json_object_put(o);
        }
 
-       return (struct binary_package_publishing_history **)result;
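+       /* Rewrite the file cache only when every page was fetched. */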
+       if (ok) {
+               bpph_json = bpph_list_to_json(result);
+               fcache_put(key, json_object_to_json_string(bpph_json));
+               json_object_put(bpph_json);
+       }
+
+       free(key);
+
+       return result;
 }
 
 int get_download_count(const char *archive_url)
@@ -139,7 +247,7 @@ const struct distro_arch_series *get_distro_arch_series(const char *url)
                return (struct distro_arch_series *)distro;
 
        content = get_url_content(url, 1);
-       
+
        if (!content)
                return NULL;