Skip to content

Commit

Permalink
Merge pull request #100 from keithj/replicates
Browse files Browse the repository at this point in the history
Report timestamps for all replicates
  • Loading branch information
Iain Bancarz committed Dec 3, 2014
2 parents 99fb1d9 + c247794 commit e0f73d7
Show file tree
Hide file tree
Showing 6 changed files with 129 additions and 86 deletions.
5 changes: 3 additions & 2 deletions src/baton.c
Original file line number Diff line number Diff line change
Expand Up @@ -776,7 +776,7 @@ json_t *list_timestamps(rcComm_t *conn, rodsPath_t *rods_path,
rods_path->outPath);
query_in = make_query_input(SEARCH_MAX_ROWS, obj_format.num_columns,
obj_format.columns);
query_in = prepare_obj_tps_list(query_in, rods_path);
query_in = prepare_obj_list(query_in, rods_path, NULL);
break;

case COLL_OBJ_T:
Expand Down Expand Up @@ -1043,7 +1043,8 @@ static json_t *list_data_object(rcComm_t *conn, rodsPath_t *rods_path,
if (flags & PRINT_SIZE) {
obj_format = &(query_format_in_t)
{ .num_columns = 3,
.columns = { COL_COLL_NAME, COL_DATA_NAME, COL_DATA_SIZE },
.columns = { COL_COLL_NAME, COL_DATA_NAME,
COL_DATA_SIZE },
.labels = { JSON_COLLECTION_KEY, JSON_DATA_OBJECT_KEY,
JSON_SIZE_KEY } };
}
Expand Down
55 changes: 26 additions & 29 deletions src/json.c
Original file line number Diff line number Diff line change
Expand Up @@ -524,41 +524,38 @@ char *json_to_local_path(json_t *object, baton_error_t *error) {
if (error->code != 0) goto error;
const char *filename = get_file_value(object, error);
if (error->code != 0) goto error;
//const char *collection = get_collection_value(object, error);
//if (error->code != 0) goto error;
const char *data_object = get_data_object_value(object, error);
if (error->code != 0) goto error;

if (represents_directory(object) && !represents_data_object(object)) {
// A collection to local directory mapping
if (directory && filename) {
path = make_file_path(directory, filename, error);
}
else if (directory && data_object) {
path = make_file_path(directory, data_object, error);
}
/* else if (collection && filename) { */
/* path = make_file_path(collection, filename, error); */
/* } */
/* else if (collection && data_object) { */
/* path = make_file_path(collection, data_object, error); */
/* } */
else if (filename) {
path = make_file_path(".", filename, error);
}
else if (data_object) {
path = make_file_path(".", data_object, error);
}
else if (directory) {
path = make_dir_path(directory, error);
if (error->code != 0) goto error;
}
else {
// All these are data object to local file mappings
if (represents_directory(object) && represents_data_object(object)) {
// No local filename; use the data object name as
// surrogate filename
const char *surrogate = get_data_object_value(object, error);
if (error->code != 0) goto error;
path = make_file_path(directory, surrogate, error);
if (error->code != 0) goto error;
}
else if (represents_file(object)) {
// Both local directory and filename specified
path = make_file_path(directory, filename, error);
if (error->code != 0) goto error;
}
else if (!directory && filename) {
// No local directory, use CWD as surrogate
path = make_file_path(".", filename, error);
if (error->code != 0) goto error;
}
else if (!filename) {
// No local filename, use data object name as surrogate
const char *surrogate = get_data_object_value(object, error);
if (error->code != 0) goto error;
path = make_file_path(".", surrogate, error);
if (error->code != 0) goto error;
}
path = make_dir_path(".", error);
}

if (error->code != 0) goto error;

return path;

error:
Expand Down
85 changes: 42 additions & 43 deletions src/json_query.c
Original file line number Diff line number Diff line change
Expand Up @@ -507,14 +507,7 @@ json_t *add_tps_json_object(rcComm_t *conn, json_t *object,
rodsPath_t rods_path;
char *path = NULL;
json_t *raw_timestamps = NULL;

json_t *timestamps;
const char *created;
const char *modified;

size_t selected_index = 0;
int selected_repl = -1;
int status;
json_t *timestamps = NULL;

if (!json_is_object(object)) {
set_baton_error(error, CAT_INVALID_ARGUMENT,
Expand All @@ -525,7 +518,7 @@ json_t *add_tps_json_object(rcComm_t *conn, json_t *object,
path = json_to_path(object, error);
if (error->code != 0) goto error;

status = set_rods_path(conn, &rods_path, path);
int status = set_rods_path(conn, &rods_path, path);
if (status < 0) {
set_baton_error(error, status, "Failed to set iRODS path '%s'", path);
goto error;
Expand All @@ -534,17 +527,21 @@ json_t *add_tps_json_object(rcComm_t *conn, json_t *object,
raw_timestamps = list_timestamps(conn, &rods_path, error);
if (error->code != 0) goto error;

// For data objects, we filter the results to present only the
// lowest replicate number. The iRODS generic query API doesn't
// permit this selection at the query level.
timestamps = json_array();
if (!timestamps) {
set_baton_error(error, -1, "Failed to allocate a new JSON array");
goto error;
}

// We report timestamps only on data objects. They exist on
// collections too, but we don't report them to be consistent with
// the 'ils' command.
if (represents_data_object(object)) {
size_t index;
json_t *timestamps;
int base = 10;

json_array_foreach(raw_timestamps, index, timestamps) {
const char *repl_str = get_replicate_num(timestamps, error);
size_t i;
json_t *item;
json_array_foreach(raw_timestamps, i, item) {
const char *repl_str = get_replicate_num(item, error);
if (error->code != 0) goto error;

char *endptr;
Expand All @@ -556,32 +553,30 @@ json_t *add_tps_json_object(rcComm_t *conn, json_t *object,
goto error;
}

if (index == 0 || repl_num < selected_repl) {
selected_repl = repl_num;
selected_index = index;
}
}
const char *created = get_created_timestamp(item, error);
if (error->code != 0) goto error;
const char *modified = get_modified_timestamp(item, error);
if (error->code != 0) goto error;

logmsg(DEBUG, "Adding timestamps from replicate %d of '%s'",
selected_repl, path);
}
json_t *iso_created =
make_timestamp(JSON_CREATED_KEY, created, ISO8601_FORMAT,
&repl_num, error);
if (error->code != 0) goto error;

timestamps = json_array_get(raw_timestamps, selected_index);
json_t *iso_modified =
make_timestamp(JSON_MODIFIED_KEY, modified, ISO8601_FORMAT,
&repl_num, error);
if (error->code != 0) goto error;

created = get_created_timestamp(timestamps, error);
if (error->code != 0) goto error;
modified = get_modified_timestamp(timestamps, error);
if (error->code != 0) goto error;
json_array_append_new(timestamps, iso_created);
json_array_append_new(timestamps, iso_modified);

int *repl_ptr;
if (selected_repl < 0) {
repl_ptr = NULL;
}
else {
repl_ptr = &selected_repl;
logmsg(DEBUG, "Adding timestamps from replicate %d of '%s'",
repl_num, path);
}
}

add_timestamps(object, created, modified, repl_ptr, error);
json_object_set_new(object, JSON_TIMESTAMPS_KEY, timestamps);
if (error->code != 0) goto error;

if (path) free(path);
Expand All @@ -595,6 +590,7 @@ json_t *add_tps_json_object(rcComm_t *conn, json_t *object,
if (path) free(path);
if (rods_path.rodsObjStat) free(rods_path.rodsObjStat);
if (raw_timestamps) json_decref(raw_timestamps);
if (timestamps) json_decref(timestamps);

return NULL;
}
Expand All @@ -607,8 +603,9 @@ json_t *add_tps_json_array(rcComm_t *conn, json_t *array,
goto error;
}

for (size_t i = 0; i < json_array_size(array); i++) {
json_t *item = json_array_get(array, i);
size_t i;
json_t *item;
json_array_foreach(array, i, item) {
add_tps_json_object(conn, item, error);
if (error->code != 0) goto error;
}
Expand Down Expand Up @@ -668,8 +665,9 @@ json_t *add_avus_json_array(rcComm_t *conn, json_t *array,
goto error;
}

for (size_t i = 0; i < json_array_size(array); i++) {
json_t *item = json_array_get(array, i);
size_t i;
json_t *item;
json_array_foreach(array, i, item) {
add_avus_json_object(conn, item, error);
if (error->code != 0) goto error;
}
Expand Down Expand Up @@ -729,8 +727,9 @@ json_t *add_acl_json_array(rcComm_t *conn, json_t *array,
goto error;
}

for (size_t i = 0; i < json_array_size(array); i++) {
json_t *item = json_array_get(array, i);
size_t i;
json_t *item;
json_array_foreach(array, i, item) {
add_acl_json_object(conn, item, error);
if (error->code != 0) goto error;
}
Expand Down
10 changes: 0 additions & 10 deletions src/query.c
Original file line number Diff line number Diff line change
Expand Up @@ -266,16 +266,6 @@ genQueryInp_t *prepare_col_acl_list(genQueryInp_t *query_in,
return add_query_conds(query_in, num_conds, (query_cond_t []) { cn, tn });
}

genQueryInp_t *prepare_obj_tps_list(genQueryInp_t *query_in,
                                    rodsPath_t *rods_path) {
    // Constrain a timestamp query to the single data object identified
    // by this path's data id.
    query_cond_t id_cond = { .column   = COL_DATA_ACCESS_DATA_ID,
                             .operator = SEARCH_OP_EQUALS,
                             .value    = rods_path->dataId };

    return add_query_conds(query_in, 1, (query_cond_t []) { id_cond });
}

genQueryInp_t *prepare_col_tps_list(genQueryInp_t *query_in,
rodsPath_t *rods_path) {
char *path = rods_path->outPath;
Expand Down
4 changes: 2 additions & 2 deletions src/query.h
Original file line number Diff line number Diff line change
Expand Up @@ -169,8 +169,8 @@ genQueryInp_t *prepare_obj_acl_list(genQueryInp_t *query_in,
genQueryInp_t *prepare_col_acl_list(genQueryInp_t *query_in,
rodsPath_t *rods_path);

genQueryInp_t *prepare_obj_tps_list(genQueryInp_t *query_in,
rodsPath_t *rods_path);
//genQueryInp_t *prepare_obj_tps_list(genQueryInp_t *query_in,
// rodsPath_t *rods_path);

genQueryInp_t *prepare_col_tps_list(genQueryInp_t *query_in,
rodsPath_t *rods_path);
Expand Down
56 changes: 56 additions & 0 deletions tests/check_baton.c
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
*/

#include <assert.h>
#include <limits.h>
#include <unistd.h>

#include <jansson.h>
Expand Down Expand Up @@ -170,6 +171,16 @@ START_TEST(test_parse_timestamp) {
}
END_TEST

// Can we parse file size strings?
// Can we parse file size strings?
START_TEST(test_parse_size) {
    // Smallest valid size
    ck_assert_uint_eq(0, parse_size("0"));

    // Largest representable value must round-trip through formatting
    // and parsing. ck_assert_uint_eq is required here: ck_assert_int_eq
    // truncates both operands to int, so comparing ULONG_MAX with it
    // would pass only by coincidental truncation to -1 on both sides.
    char max[1024];
    snprintf(max, sizeof max, "%lu", ULONG_MAX);
    ck_assert_uint_eq(ULONG_MAX, parse_size(max));
}
END_TEST

// Can we coerce ISO-8859-1 to UTF-8?
START_TEST(test_to_utf8) {
char in[2] = { 0, 0 };
Expand Down Expand Up @@ -1422,6 +1433,49 @@ START_TEST(test_json_to_path) {
}
END_TEST

// Can we convert JSON representation to a useful local path string?
// Can we convert JSON representation to a useful local path string?
START_TEST(test_json_to_local_path) {
    const char *file_name = "file1.txt";
    const char *file_path = "/file1/path";

    const char *obj_name = "obj1.txt";
    const char *coll_path = "/obj/path";

    // Local directory and filename present: they take precedence over
    // the iRODS collection/data object names.
    json_t *path1 = json_pack("{s:s s:s s:s s:s}",
                              JSON_DIRECTORY_KEY, file_path,
                              JSON_FILE_KEY, file_name,
                              JSON_COLLECTION_KEY, coll_path,
                              JSON_DATA_OBJECT_KEY, obj_name);

    baton_error_t error_path1;
    ck_assert_str_eq(json_to_local_path(path1, &error_path1),
                     "/file1/path/file1.txt");
    ck_assert_int_eq(error_path1.code, 0);
    json_decref(path1);

    // No local directory: the CWD is used as a surrogate directory.
    json_t *path2 = json_pack("{s:s s:s s:s}",
                              JSON_FILE_KEY, file_name,
                              JSON_COLLECTION_KEY, coll_path,
                              JSON_DATA_OBJECT_KEY, obj_name);

    baton_error_t error_path2;
    // Bug fix: this case previously queried path1 (already freed above)
    // instead of path2, so path2 was never exercised.
    ck_assert_str_eq(json_to_local_path(path2, &error_path2),
                     "./file1.txt");
    ck_assert_int_eq(error_path2.code, 0);
    json_decref(path2);

    // No local directory or filename: the data object name is used as a
    // surrogate filename under the CWD.
    json_t *path3 = json_pack("{s:s s:s}",
                              JSON_COLLECTION_KEY, coll_path,
                              JSON_DATA_OBJECT_KEY, obj_name);

    baton_error_t error_path3;
    // Bug fix: this case previously queried path1 instead of path3.
    ck_assert_str_eq(json_to_local_path(path3, &error_path3),
                     "./obj1.txt");
    ck_assert_int_eq(error_path3.code, 0);
    json_decref(path3);
}
END_TEST

// Can we test JSON for the presence of an AVU?
START_TEST(test_contains_avu) {
json_t *avu1 = json_pack("{s:s, s:s}",
Expand Down Expand Up @@ -1706,6 +1760,7 @@ Suite *baton_suite(void) {
tcase_add_test(utilities_tests, test_maybe_stdin);
tcase_add_test(utilities_tests, test_format_timestamp);
tcase_add_test(utilities_tests, test_parse_timestamp);
tcase_add_test(utilities_tests, test_parse_size);
tcase_add_test(utilities_tests, test_to_utf8);

TCase *basic_tests = tcase_create("basic");
Expand Down Expand Up @@ -1753,6 +1808,7 @@ Suite *baton_suite(void) {
tcase_add_test(basic_tests, test_represents_file);

tcase_add_test(basic_tests, test_json_to_path);
tcase_add_test(basic_tests, test_json_to_local_path);
tcase_add_test(basic_tests, test_contains_avu);

tcase_add_test(basic_tests, test_get_user);
Expand Down

0 comments on commit e0f73d7

Please sign in to comment.