Commit 5b8a3003 authored by Adam Lee

s3ext: add s3interface and s3restful classes

s3interface wraps the S3 RESTful API calls, such as listBucket()
and fetchData(); s3restful handles HTTP GET, and will handle PUT in the future.
Parent 122a8dfa
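For reference, a minimal sketch (not part of the commit) of how the two new classes are intended to fit together, based on the headers added below. The header name s3restful_service.h is assumed from the object file added to the Makefile, and the bucket, region and credential values are placeholders for illustration only.

#include "s3interface.h"        // S3Service, ListBucketResult
#include "s3restful_service.h"  // S3RESTfulService : public RESTfulService

// Wire the concrete RESTful backend into the S3 API wrapper, then list a bucket.
// All literal values below are made-up placeholders.
void listBucketSketch() {
    S3RESTfulService restfulService;  // handles HTTP GET today, PUT later
    S3Service s3service;
    s3service.setRESTfulService(&restfulService);

    S3Credential cred = {"sample_access_id", "sample_secret"};
    ListBucketResult *result =
        s3service.ListBucket("https", "us-west-2", "sample-bucket", "prefix/", cred);
    if (result != NULL) {
        // ListBucket() documents that the caller must free the returned object.
        delete result;
    }
}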
......@@ -16,7 +16,7 @@ endif
# Targets
MODULE_big = gps3ext
OBJS = lib/http_parser.o lib/ini.o src/gps3ext.o src/s3conf.o src/s3common.o src/gpreader.o src/s3utils.o src/s3log.o src/s3url_parser.o src/s3http_headers.o src/s3thread.o src/s3extbase.o src/s3reader.o src/s3interface.o
OBJS = lib/http_parser.o lib/ini.o src/gps3ext.o src/s3conf.o src/s3common.o src/gpreader.o src/s3utils.o src/s3log.o src/s3url_parser.o src/s3http_headers.o src/s3thread.o src/s3extbase.o src/s3reader.o src/s3interface.o src/s3restful_service.o
# Launch
PGXS := $(shell pg_config --pgxs)
......@@ -31,7 +31,7 @@ lint:
tags:
@make -f Makefile.others tags
test:
test: format
@make -f Makefile.others test
coverage:
......
......@@ -16,7 +16,7 @@ endif
# Targets
PROGRAM = gpcheckcloud
OBJS = src/gpcheckcloud.o src/s3conf.o src/gpreader.o src/s3utils.o src/s3log.o src/s3common.o lib/http_parser.o lib/ini.o src/s3url_parser.o src/s3http_headers.o src/s3thread.o src/s3extbase.o src/s3reader.o src/s3interface.o
OBJS = src/gpcheckcloud.o src/s3conf.o src/gpreader.o src/s3utils.o src/s3log.o src/s3common.o lib/http_parser.o lib/ini.o src/s3url_parser.o src/s3http_headers.o src/s3thread.o src/s3extbase.o src/s3reader.o src/s3interface.o src/s3restful_service.o
# Launch
PGXS := $(shell pg_config --pgxs)
......
# Options
DEBUG_S3_CURL = n
ARCH=$(shell uname -s)
GCOV=gcov
# Flags
CPP = g++
INCLUDES = -Isrc -Iinclude -Ilib -I/usr/include/libxml2
LDFLAGS = -lpthread -lcrypto -lcurl -lxml2 -lgcov -lz
LDFLAGS = -lpthread -lcrypto -lcurl -lxml2 -lz
CPPFLAGS = -O2 -g3 -std=c++98 -Wall -fPIC -DS3_STANDALONE -fprofile-arcs -ftest-coverage
ifeq "$(ARCH)" "Darwin"
LDFLAGS += -coverage
GCOV += -a
else
LDFLAGS += -lgcov
GCOV += -r
endif
ifeq ($(DEBUG_S3_CURL),y)
CPPFLAGS += -DDEBUG_S3_CURL
endif
......@@ -14,7 +24,7 @@ endif
all: test
# Google TEST
TEST_SRC_FILES = test/s3conf_test.cpp test/s3utils_test.cpp test/gpreader_test.cpp test/s3common_test.cpp test/s3log_test.cpp test/s3url_parser_test.cpp test/s3http_headers_test.cpp test/s3thread_test.cpp test/s3reader_test.cpp test/s3bucket_reader_test.cpp test/s3interface_test.cpp
TEST_SRC_FILES = test/s3conf_test.cpp test/s3utils_test.cpp test/gpreader_test.cpp test/s3common_test.cpp test/s3log_test.cpp test/s3url_parser_test.cpp test/s3http_headers_test.cpp test/s3thread_test.cpp test/s3reader_test.cpp test/s3bucket_reader_test.cpp test/s3interface_test.cpp test/s3restful_service_test.cpp
TEST_OBJS = $(TEST_SRC_FILES:.cpp=.o)
TEST_APP = s3test
......@@ -48,10 +58,11 @@ $(TEST_APP): $(TEST_OBJS) gtest_main.a
$(CPP) $(CPPFLAGS) $(INCLUDES) -MMD -MP -c $< -o $@
test: $(TEST_APP)
-rm -rf *.gcda *.gcov test/*.gcda test/*.gcov
-@./$(TEST_APP)
coverage: test
@gcov -r $(TEST_SRC_FILES) | grep -v test.cpp | grep -A 2 "src/.*.cpp"
@$(GCOV) $(TEST_SRC_FILES) | grep -v test.cpp | grep -A 2 "src/.*.cpp"
clean:
rm -f *.gcov src/*.gcov src/*.gcda src/*.gcno
......@@ -61,13 +72,13 @@ distclean: clean
rm -f tags cscope.*
tags:
ctags --c++-kinds=+p --fields=+iaS --extra=+q src/*.cpp test/*.cpp include/*.h
cscope -bq src/*.cpp test/*.cpp include/*.h
ctags --c++-kinds=+p --fields=+iaS --extra=+q src/*.cpp test/*.cpp include/*.h lib/*.cpp lib/*.h
cscope -bq src/*.cpp test/*.cpp include/*.h lib/*.cpp lib/*.h
lint:
cppcheck -v --enable=warning src/*.cpp test/*.cpp include/*.h
format:
clang-format -style="{BasedOnStyle: Google, IndentWidth: 4}" -i src/*.cpp test/*.cpp include/*.h
clang-format -style="{BasedOnStyle: Google, IndentWidth: 4, ColumnLimit: 100}" -i src/*.cpp test/*.cpp include/*.h
.PHONY: buildtest test coverage clean distclean tags lint format
......@@ -9,7 +9,8 @@ class Reader {
virtual void open(const ReaderParams &params) = 0;
// read() attempts to read up to count bytes into the buffer starting at buffer.
// read() attempts to read up to count bytes into the buffer starting at
// buffer.
// Return 0 if EOF. Throw exception if encounters errors.
virtual uint64_t read(char *buf, uint64_t count) = 0;
......
......@@ -4,83 +4,51 @@
using std::string;
class ReaderParams {
public:
ReaderParams() {};
virtual ~ReaderParams() {};
uint64_t getChunkSize() const {
return chunkSize;
}
void setChunkSize(uint64_t chunkSize) {
this->chunkSize = chunkSize;
}
const S3Credential& getCred() const {
return cred;
}
void setCred(const S3Credential& cred) {
this->cred = cred;
}
const string& getKeyUrl() const {
return keyUrl;
}
void setKeyUrl(const string& keyUrl) {
this->keyUrl = keyUrl;
}
const string& getRegion() const {
return region;
}
void setRegion(const string& region) {
this->region = region;
}
uint64_t getSize() const {
return size;
}
void setSize(uint64_t size) {
this->size = size;
}
const string& getUrl() const {
return url;
}
void setUrl(const string& url) {
this->url = url;
}
int getSegId() const {
return segId;
}
void setSegId(int segId) {
this->segId = segId;
}
int getSegNum() const {
return segNum;
}
void setSegNum(int segNum) {
this->segNum = segNum;
}
private:
string url; // original url to read/write.
string keyUrl; // key url in s3 bucket.
string region;
uint64_t size; // key/file size.
uint64_t chunkSize; // chunk size
S3Credential cred;
int segId;
int segNum;
public:
ReaderParams(){};
virtual ~ReaderParams(){};
uint64_t getChunkSize() const { return chunkSize; }
void setChunkSize(uint64_t chunkSize) { this->chunkSize = chunkSize; }
const S3Credential& getCred() const { return cred; }
void setCred(const S3Credential& cred) { this->cred = cred; }
const string& getKeyUrl() const { return keyUrl; }
void setKeyUrl(const string& keyUrl) { this->keyUrl = keyUrl; }
const string& getRegion() const { return region; }
void setRegion(const string& region) { this->region = region; }
uint64_t getSize() const { return size; }
void setSize(uint64_t size) { this->size = size; }
const string& getUrl() const { return url; }
void setUrl(const string& url) { this->url = url; }
int getSegId() const { return segId; }
void setSegId(int segId) { this->segId = segId; }
int getSegNum() const { return segNum; }
void setSegNum(int segNum) { this->segNum = segNum; }
private:
string url; // original url to read/write.
string keyUrl; // key url in s3 bucket.
string region;
uint64_t size; // key/file size.
uint64_t chunkSize; // chunk size
S3Credential cred;
int segId;
int segNum;
};
#endif /* INCLUDE_READER_PARAMS_H_ */
#ifndef INCLUDE_RESTFUL_SERVICE_H_
#define INCLUDE_RESTFUL_SERVICE_H_
#include <stdint.h>
#include <map>
#include <string>
#include <vector>
#include "s3http_headers.h"
using std::string;
using std::vector;
using std::map;
enum ResponseStatus {
OK,
FAIL,
};
class Response {
public:
Response() : status(FAIL) {}
Response(ResponseStatus status, const vector<uint8_t>& buf) : status(status), buffer(buf) {}
bool isSuccess() { return status == OK; }
vector<uint8_t>& getRawData() { return buffer; }
const string& getMessage() const { return message; }
void setMessage(const string& message) { this->message = message; }
const ResponseStatus& getStatus() const { return status; }
void setStatus(const ResponseStatus& status) { this->status = status; }
void appendBuffer(char* ptr, size_t size) {
// TODO: Fix eclipse warning.
buffer.insert(buffer.end(), ptr, ptr + size);
}
void clearBuffer() {
buffer.clear();
buffer.shrink_to_fit();
}
private:
// status is OK when a full HTTP response is received, even if the response body indicates failure.
ResponseStatus status;
string message;
vector<uint8_t> buffer;
};
class RESTfulService {
public:
RESTfulService();
virtual ~RESTfulService();
virtual Response get(const string& url, HTTPHeaders& headers,
const map<string, string>& params) = 0;
};
#endif /* INCLUDE_RESTFUL_SERVICE_H_ */
\ No newline at end of file
......@@ -15,7 +15,7 @@ class S3BucketReader : public Reader {
S3BucketReader();
~S3BucketReader();
void open(const ReaderParams& params);
void open(const ReaderParams &params);
uint64_t read(char *buf, uint64_t count);
void close();
......@@ -23,7 +23,7 @@ class S3BucketReader : public Reader {
void setUpstreamReader(Reader *reader) { this->upstreamReader = reader; }
void validateURL();
ListBucketResult* listBucketWithRetry(int retries);
ListBucketResult *listBucketWithRetry(int retries);
protected:
// Get URL for a S3 object/file.
......@@ -48,13 +48,13 @@ class S3BucketReader : public Reader {
bool needNewReader;
ListBucketResult *keyList; // List of matched keys/files.
unsigned int keyIndex; // BucketContent index of keylist->contents.
unsigned int keyIndex; // BucketContent index of keylist->contents.
void SetSchema();
void SetRegion();
void SetBucketAndPrefix();
BucketContent* getNextKey();
const ReaderParams& getReaderParams(BucketContent* key);
BucketContent *getNextKey();
const ReaderParams &getReaderParams(BucketContent *key);
};
#endif
\ No newline at end of file
......@@ -21,16 +21,14 @@ struct S3Credential {
enum Method { GET, PUT, POST, DELETE, HEAD };
bool SignRequestV4(const string& method, HTTPHeaders* h,
const string& orig_region, const string& path,
const string& query, const S3Credential& cred);
void SignRequestV4(const string& method, HTTPHeaders* h, const string& orig_region,
const string& path, const string& query, const S3Credential& cred);
struct XMLInfo {
xmlParserCtxtPtr ctxt;
};
uint64_t XMLParserCallback(void* contents, uint64_t size, uint64_t nmemb,
void* userp);
uint64_t XMLParserCallback(void* contents, uint64_t size, uint64_t nmemb, void* userp);
char* get_opt_s3(const char* url, const char* key);
......
#ifndef INCLUDE_S3INTERFACE_H_
#define INCLUDE_S3INTERFACE_H_
#include "restful_service.h"
#include "s3reader.h"
class S3Interface {
......@@ -8,10 +9,8 @@ class S3Interface {
virtual ~S3Interface() {}
// It is caller's responsibility to free returned memory.
virtual ListBucketResult* ListBucket(const string& schema,
const string& region,
const string& bucket,
const string& prefix,
virtual ListBucketResult* ListBucket(const string& schema, const string& region,
const string& bucket, const string& prefix,
const S3Credential& cred) = 0;
};
......@@ -19,9 +18,23 @@ class S3Service : public S3Interface {
public:
S3Service();
virtual ~S3Service();
ListBucketResult* ListBucket(const string& schema, const string& region,
const string& bucket, const string& prefix,
const S3Credential& cred);
ListBucketResult* ListBucket(const string& schema, const string& region, const string& bucket,
const string& prefix, const S3Credential& cred);
void setRESTfulService(RESTfulService* service) { this->service = service; }
private:
string getUrl(const string& prefix, const string& schema, const string& host,
const string& bucket, const string& marker);
void parseBucketXML(ListBucketResult* result, xmlNode* root_element, string& marker);
xmlParserCtxtPtr getBucketXML(const string& region, const string& url, const string& prefix,
const S3Credential& cred, const string& marker);
bool checkAndParseBucketXML(ListBucketResult* result, xmlParserCtxtPtr xmlcontext,
string& marker);
HTTPHeaders composeHTTPHeaders(const string& url, const string& marker, const string& prefix,
const string& region, const S3Credential& cred);
RESTfulService* service;
};
#endif /* INCLUDE_S3INTERFACE_H_ */
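The setRESTfulService() setter above lets tests drive S3Service without real network access. A hedged sketch of such a test, assuming the Google Test setup already used by the other *_test.cpp targets; the StubRESTfulService name is invented here for illustration:

#include "gtest/gtest.h"
#include "s3interface.h"

// A stub backend that reports failure instead of touching the network.
class StubRESTfulService : public RESTfulService {
   public:
    Response get(const string & /*url*/, HTTPHeaders & /*headers*/,
                 const map<string, string> & /*params*/) {
        return Response();  // default-constructed Response has status FAIL
    }
};

TEST(S3ServiceTest, ListBucketReturnsNullWhenGetFails) {
    StubRESTfulService stub;
    S3Service service;
    service.setRESTfulService(&stub);

    S3Credential cred = {"id", "secret"};
    ListBucketResult *result = service.ListBucket("https", "us-west-2", "bucket", "", cred);
    EXPECT_TRUE(result == NULL);  // a failed GET makes ListBucket() return NULL
}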
......@@ -2,9 +2,9 @@
#define INCLUDE_S3KEYREADER_H_
class S3KeyReader : public Reader {
public:
S3KeyReader();
virtual ~S3KeyReader();
public:
S3KeyReader();
virtual ~S3KeyReader();
};
#endif /* INCLUDE_S3KEYREADER_H_ */
......@@ -27,8 +27,7 @@ LOGLEVEL getLogLevel(const char* v);
#define PRINTFUNCTION(i, format, ...) LogMessage(i, format, __VA_ARGS__)
#define LOG_FMT "[%s]#%d#(%0X)%s:%d "
#define LOG_ARGS(LOGLEVELSTR) \
LOGLEVELSTR, s3ext_segid, pthread_self(), __FILE__, __LINE__
#define LOG_ARGS(LOGLEVELSTR) LOGLEVELSTR, s3ext_segid, pthread_self(), __FILE__, __LINE__
#define NEWLINE "\n"
#define S3DEBUG(message, args...) \
......
......@@ -13,34 +13,31 @@ using std::stringstream;
extern void StringAppendPrintf(std::string *output, const char *format, ...);
#define CHECK_OR_DIE_MSG(_condition, _format, _args...) \
do { \
if (!(_condition)) { \
std::string _error_str; \
StringAppendPrintf(&_error_str, _format, _args); \
std::stringstream _err_msg; \
_err_msg << _error_str << ", Function: " << __func__ \
<< ", File: " << __FILE__ << "(" << __LINE__ << "). "; \
throw std::runtime_error(_err_msg.str()); \
} \
#define CHECK_OR_DIE_MSG(_condition, _format, _args...) \
do { \
if (!(_condition)) { \
std::string _error_str; \
StringAppendPrintf(&_error_str, _format, _args); \
std::stringstream _err_msg; \
_err_msg << _error_str << ", Function: " << __func__ << ", File: " << __FILE__ << "(" \
<< __LINE__ << "). "; \
throw std::runtime_error(_err_msg.str()); \
} \
} while (false)
#define CHECK_ARG_OR_DIE(_arg) \
CHECK_OR_DIE_MSG(_arg, "Null pointer of argument: '%s'", #_arg)
#define CHECK_OR_DIE(_condition) \
do { \
if (!(_condition)) { \
std::stringstream _err_msg; \
_err_msg << "Failed expression: (" << #_condition \
<< "), Function: " << __func__ << ", File: " << __FILE__ \
<< "(" << __LINE__ << "). "; \
throw std::runtime_error(_err_msg.str()); \
} \
#define CHECK_ARG_OR_DIE(_arg) CHECK_OR_DIE_MSG(_arg, "Null pointer of argument: '%s'", #_arg)
#define CHECK_OR_DIE(_condition) \
do { \
if (!(_condition)) { \
std::stringstream _err_msg; \
_err_msg << "Failed expression: (" << #_condition << "), Function: " << __func__ \
<< ", File: " << __FILE__ << "(" << __LINE__ << "). "; \
throw std::runtime_error(_err_msg.str()); \
} \
} while (false)
inline void VStringAppendPrintf(string *output, const char *format,
va_list argp) {
inline void VStringAppendPrintf(string *output, const char *format, va_list argp) {
CHECK_OR_DIE(output);
char buffer[BUFFER_LEN];
vsnprintf(buffer, BUFFER_LEN, format, argp);
......
......@@ -19,9 +19,11 @@
#include <zlib.h>
#include "s3common.h"
#include "s3macros.h"
#include "s3url_parser.h"
using std::vector;
using std::stringstream;
struct Range {
/* data */
......@@ -52,8 +54,8 @@ class OffsetMgr {
class BlockingBuffer {
public:
static BlockingBuffer* CreateBuffer(const string& url, const string& region,
OffsetMgr* o, S3Credential* pcred);
static BlockingBuffer* CreateBuffer(const string& url, const string& region, OffsetMgr* o,
S3Credential* pcred);
BlockingBuffer(const string& url, OffsetMgr* o);
virtual ~BlockingBuffer();
bool Init();
......@@ -98,8 +100,8 @@ class Downloader {
public:
Downloader(uint8_t part_num);
~Downloader();
bool init(const string& url, const string& region, uint64_t size,
uint64_t chunksize, S3Credential* pcred);
bool init(const string& url, const string& region, uint64_t size, uint64_t chunksize,
S3Credential* pcred);
bool get(char* buf, uint64_t& len);
void destroy();
......@@ -138,7 +140,7 @@ class HTTPFetcher : public BlockingBuffer {
protected:
uint64_t fetchdata(uint64_t offset, char* data, uint64_t len);
virtual bool processheader() { return true; };
virtual void signHeader() {}
CURL* curl;
Method method;
HTTPHeaders headers;
......@@ -147,12 +149,11 @@ class HTTPFetcher : public BlockingBuffer {
class S3Fetcher : public HTTPFetcher {
public:
S3Fetcher(const string& url, const string& region, OffsetMgr* o,
const S3Credential& cred);
S3Fetcher(const string& url, const string& region, OffsetMgr* o, const S3Credential& cred);
~S3Fetcher(){};
protected:
virtual bool processheader();
virtual void signHeader();
private:
string region;
......@@ -161,43 +162,30 @@ class S3Fetcher : public HTTPFetcher {
struct BucketContent;
// To avoid double delete and core dump, always use new to create
// ListBucketResult object,
// unless we upgrade to c++ 11. Reason is we delete ListBucketResult in close()
// explicitly.
struct ListBucketResult {
string Name;
string Prefix;
unsigned int MaxKeys;
vector<BucketContent *> contents;
vector<BucketContent*> contents;
~ListBucketResult();
};
BucketContent* CreateBucketContentItem(const string& key, uint64_t size);
struct BucketContent {
friend BucketContent* CreateBucketContentItem(const string& key,
uint64_t size);
BucketContent();
~BucketContent();
BucketContent() : name(""), size(0) {}
BucketContent(string name, uint64_t size) {
this->name = name;
this->size = size;
}
~BucketContent() {}
string getName() const { return this->name; };
uint64_t getSize() const { return this->size; };
private:
// BucketContent(const BucketContent& b) = delete;
// BucketContent operator=(const BucketContent& b) = delete;
string name;
uint64_t size;
};
xmlParserCtxtPtr DoGetXML(const string &region, const string &url,
const string &prefix, const S3Credential &cred,
const string &marker);
bool extractContent(ListBucketResult* result, xmlNode* root_element,
string& marker);
// It is caller's responsibility to free returned memory.
ListBucketResult* ListBucket(const string& schema, const string& region,
const string& bucket, const string& prefix,
const S3Credential& cred);
#endif
#ifndef INCLUDE_S3RESTUL_SERVICE_H_
#define INCLUDE_S3RESTUL_SERVICE_H_
#include "restful_service.h"
class S3RESTfulService : public RESTfulService {
public:
S3RESTfulService();
virtual ~S3RESTfulService();
Response get(const string& url, HTTPHeaders& headers, const map<string, string>& params);
};
#endif /* INCLUDE_S3RESTUL_SERVICE_H_ */
......@@ -35,17 +35,15 @@ struct Uploader {
// pthread_t* threads;
};
const char *GetUploadId(const char *host, const char *bucket,
const char *obj_name, const S3Credential &cred);
const char *GetUploadId(const char *host, const char *bucket, const char *obj_name,
const S3Credential &cred);
const char *PartPutS3Object(const char *host, const char *bucket,
const char *obj_name, const S3Credential &cred,
const char *data, uint64_t data_size,
const char *PartPutS3Object(const char *host, const char *bucket, const char *obj_name,
const S3Credential &cred, const char *data, uint64_t data_size,
uint64_t part_number, const char *upload_id);
bool CompleteMultiPutS3(const char *host, const char *bucket,
const char *obj_name, const char *upload_id,
const char **etag_array, uint64_t count,
bool CompleteMultiPutS3(const char *host, const char *bucket, const char *obj_name,
const char *upload_id, const char **etag_array, uint64_t count,
const S3Credential &cred);
#endif
......@@ -13,8 +13,7 @@ class UrlParser {
/* data */
private:
char* extractField(const struct http_parser_url* u,
http_parser_url_fields i);
char* extractField(const struct http_parser_url* u, http_parser_url_fields i);
char* schema;
char* host;
char* path;
......
......@@ -20,21 +20,17 @@ bool gethttpnow(char datebuf[65]);
bool trim(char* out, const char* in, const char* trimed = " \t\r\n");
bool sha1hmac(const char* str, unsigned char out_hash[20], const char* secret,
int secret_len);
bool sha1hmac(const char* str, unsigned char out_hash[20], const char* secret, int secret_len);
bool sha1hmac_hex(const char* str, char out_hash_hex[41], const char* secret,
int secret_len);
bool sha1hmac_hex(const char* str, char out_hash_hex[41], const char* secret, int secret_len);
bool sha256(const char* string, unsigned char out_hash[32]);
bool sha256_hex(const char* string, char out_hash_hex[65]);
bool sha256hmac(const char* str, unsigned char out_hash[32], const char* secret,
int secret_len);
bool sha256hmac(const char* str, unsigned char out_hash[32], const char* secret, int secret_len);
bool sha256hmac_hex(const char* str, char out_hash_hex[65], const char* secret,
int secret_len);
bool sha256hmac_hex(const char* str, char out_hash_hex[65], const char* secret, int secret_len);
size_t find_Nth(const string& str, // where to work
unsigned N, // N'th ocurrence
......@@ -77,10 +73,8 @@ class Config {
public:
Config(const string& filename);
~Config();
string Get(const string& sec, const string& key,
const string& defaultvalue);
bool Scan(const string& sec, const string& key, const char* scanfmt,
void* dst);
string Get(const string& sec, const string& key, const string& defaultvalue);
bool Scan(const string& sec, const string& key, const char* scanfmt, void* dst);
void* Handle() { return (void*)this->_conf; };
private:
......
......@@ -22,9 +22,7 @@ create READABLE external table s3example (date text, time text, open float, high
\d s3example
SELECT count(*) FROM s3example;
SELECT sum(open) FROM s3example;
SELECT avg(open) FROM s3example;
-- ========
......@@ -34,9 +32,7 @@ create READABLE external table s3example (date text, time text, open float, high
\d s3example
SELECT count(*) FROM s3example;
SELECT sum(open) FROM s3example;
SELECT avg(open) FROM s3example;
-- ========
......@@ -46,9 +42,7 @@ create READABLE external table s3example (date text, time text, open float, high
\d s3example
SELECT count(*) FROM s3example;
SELECT sum(open) FROM s3example;
SELECT avg(open) FROM s3example;
-- ========
......@@ -58,9 +52,7 @@ create READABLE external table s3example (date text, time text, open float, high
\d s3example
SELECT count(*) FROM s3example;
SELECT sum(open) FROM s3example;
SELECT avg(open) FROM s3example;
-- ========
......@@ -70,9 +62,7 @@ create READABLE external table s3example (date text, time text, open float, high
\d s3example
SELECT count(*) FROM s3example;
SELECT sum(open) FROM s3example;
SELECT avg(open) FROM s3example;
-- ========
......@@ -82,9 +72,7 @@ create READABLE external table s3example (date text, time text, open float, high
\d s3example
SELECT count(*) FROM s3example;
SELECT sum(open) FROM s3example;
SELECT avg(open) FROM s3example;
-- ========
......@@ -94,9 +82,7 @@ create READABLE external table s3example (date text, time text, open float, high
\d s3example
SELECT count(*) FROM s3example;
SELECT sum(open) FROM s3example;
SELECT avg(open) FROM s3example;
-- ========
......@@ -106,9 +92,7 @@ create READABLE external table s3example (date text, time text, open float, high
\d s3example
SELECT count(*) FROM s3example;
SELECT sum(open) FROM s3example;
SELECT avg(open) FROM s3example;
-- ========
......@@ -118,9 +102,7 @@ create READABLE external table s3example (date text, time text, open float, high
\d s3example
SELECT count(*) FROM s3example;
SELECT sum(open) FROM s3example;
SELECT avg(open) FROM s3example;
-- =======
-- CLEANUP
......
-- ========
-- PROTOCOL
-- ========
-- create the database functions
CREATE OR REPLACE FUNCTION read_from_s3() RETURNS integer AS
'$libdir/gps3ext.so', 's3_import' LANGUAGE C STABLE;
-- declare the protocol name along with in/out funcs
CREATE PROTOCOL s3 (
readfunc = read_from_s3
);
-- Check out the catalog table
select * from pg_extprotocol;
-- ========
drop external table s3example;
create READABLE external table s3example (date text, time text, open float, high float,
low float, volume int) location('s3://s3-ap-northeast-1.amazonaws.com/apnortheast1.s3test.pivotal.io config=/home/gpadmin/s3.conf') FORMAT 'csv';
\d s3example
SELECT sum(open) FROM s3example;
-- ========
drop external table s3example;
create READABLE external table s3example (date text, time text, open float, high float,
low float, volume int) location('s3://s3-eu-central-1.amazonaws.com/eucentral1.s3test.pivotal.io/ config=/home/gpadmin/s3.conf') FORMAT 'csv';
\d s3example
SELECT sum(open) FROM s3example;
-- ========
drop external table s3example;
create READABLE external table s3example (date text, time text, open float, high float,
low float, volume int) location('s3://s3-sa-east-1.amazonaws.com/saeast1.s3test.pivotal.io/ config=/home/gpadmin/s3.conf') FORMAT 'csv';
\d s3example
SELECT sum(open) FROM s3example;
-- =======
-- CLEANUP
-- =======
DROP EXTERNAL TABLE s3example;
DROP PROTOCOL s3;
......@@ -79,9 +79,8 @@ bool read_config(const char *config) {
ListBucketResult *list_bucket(S3Reader *wrapper) {
S3Credential g_cred = {s3ext_accessid, s3ext_secret};
ListBucketResult *r =
ListBucket("https", wrapper->get_region(), wrapper->get_bucket(),
wrapper->get_prefix(), g_cred);
ListBucketResult *r = ListBucket("https", wrapper->get_region(), wrapper->get_bucket(),
wrapper->get_prefix(), g_cred);
return r;
}
......
......@@ -5,8 +5,8 @@
#include "gps3ext.h"
#include "s3conf.h"
#include "s3log.h"
#include "s3utils.h"
#include "s3macros.h"
#include "s3utils.h"
using std::string;
using std::stringstream;
......@@ -35,8 +35,8 @@ bool S3Reader::Init(int segid, int segnum, int chunksize) {
int initretry = 3;
while (initretry--) {
this->keylist = ListBucket(this->schema, this->region, this->bucket,
this->prefix, this->cred);
// TODO: refactor, ListBucket() here.
this->keylist = NULL;
if (!this->keylist) {
S3INFO("Can't get keylist from bucket %s", this->bucket.c_str());
......@@ -95,12 +95,12 @@ bool S3Reader::getNextDownloader() {
S3DEBUG("key: %s, size: %llu", keyurl.c_str(), c->getSize());
// 4. Initialize and kick off Downloader.
bool ok = filedownloader->init(keyurl, this->region, c->getSize(), this->chunksize,
&this->cred);
bool ok =
filedownloader->init(keyurl, this->region, c->getSize(), this->chunksize, &this->cred);
if (ok) {
// for now, every segment downloads its assigned files(mod)
// better to build a workqueue in case not all segments are available
this->contentindex += this->segnum;
// for now, every segment downloads its assigned files(mod)
// better to build a workqueue in case not all segments are available
this->contentindex += this->segnum;
} else {
delete this->filedownloader;
this->filedownloader = NULL;
......@@ -127,14 +127,14 @@ bool S3Reader::TransferData(char *data, uint64_t &len) {
}
RETRY:
uint64_t buflen = len;
uint64_t buflen = len;
bool ok = this->filedownloader->get(data, buflen);
if (!ok) {
S3ERROR("Failed to get data from file downloader");
return false;
}
if (buflen == 0) {
if (! this->getNextDownloader()) {
if (!this->getNextDownloader()) {
return false;
}
......
......@@ -59,8 +59,7 @@ Datum s3_import(PG_FUNCTION_ARGS) {
/* Must be called via the external table format manager */
if (!CALLED_AS_EXTPROTOCOL(fcinfo))
elog(ERROR,
"extprotocol_import: not called by external protocol manager");
elog(ERROR, "extprotocol_import: not called by external protocol manager");
/* Get our internal description of the protocol */
s3reader = (S3Reader *)EXTPROTOCOL_GET_USER_CTX(fcinfo);
......
#include "restful_service.h"
RESTfulService::RESTfulService() {
// TODO Auto-generated constructor stub
}
RESTfulService::~RESTfulService() {
// TODO Auto-generated destructor stub
}
\ No newline at end of file
......@@ -7,12 +7,12 @@
#include "gpreader.h"
#include "gps3ext.h"
#include "reader.h"
#include "reader_params.h"
#include "s3bucket_reader.h"
#include "s3conf.h"
#include "s3log.h"
#include "s3macros.h"
#include "s3utils.h"
#include "reader_params.h"
using std::string;
using std::stringstream;
......@@ -31,68 +31,67 @@ S3BucketReader::S3BucketReader() : Reader() {
this->needNewReader = true;
}
S3BucketReader::~S3BucketReader() {
this->close();
}
S3BucketReader::~S3BucketReader() { this->close(); }
void S3BucketReader::setS3interface(S3Interface *s3) { this->s3interface = s3; }
void S3BucketReader::open(const ReaderParams& params) {
this->url = params.getUrl();
this->segId = params.getSegId();
this->segNum = params.getSegNum();
this->cred = params.getCred();
void S3BucketReader::open(const ReaderParams &params) {
this->url = params.getUrl();
this->segId = params.getSegId();
this->segNum = params.getSegNum();
this->cred = params.getCred();
this->validateURL();
this->keyList = this->listBucketWithRetry(3);
return;
}
BucketContent* S3BucketReader::getNextKey() {
this->keyIndex = (this->keyIndex == (unsigned int)-1) ? this->segId : this->keyIndex + this->segNum;
BucketContent *S3BucketReader::getNextKey() {
this->keyIndex =
(this->keyIndex == (unsigned int)-1) ? this->segId : this->keyIndex + this->segNum;
if (this->keyIndex >= this->keyList->contents.size()) {
return NULL;
}
if (this->keyIndex >= this->keyList->contents.size()) {
return NULL;
}
return this->keyList->contents[this->keyIndex];
return this->keyList->contents[this->keyIndex];
}
const ReaderParams& S3BucketReader::getReaderParams(BucketContent* key) {
ReaderParams *params = new ReaderParams();
params->setKeyUrl(this->getKeyURL(key->getName()));
params->setRegion(this->region);
params->setSize(key->getSize());
params->setChunkSize(this->chunkSize);
S3DEBUG("key: %s, size: %" PRIu64, params->getKeyUrl().c_str(), params->getSize());
return *params;
const ReaderParams &S3BucketReader::getReaderParams(BucketContent *key) {
ReaderParams *params = new ReaderParams();
params->setKeyUrl(this->getKeyURL(key->getName()));
params->setRegion(this->region);
params->setSize(key->getSize());
params->setChunkSize(this->chunkSize);
S3DEBUG("key: %s, size: %" PRIu64, params->getKeyUrl().c_str(), params->getSize());
return *params;
}
uint64_t S3BucketReader::read(char *buf, uint64_t count) {
CHECK_OR_DIE(this->upstreamReader != NULL);
while (true) {
if (this->needNewReader) {
BucketContent *key = this->getNextKey();
if (key == NULL) {
S3DEBUG("Read finished for segment: %d", this->segId);
return 0;
}
this->upstreamReader->open(getReaderParams(key));
this->needNewReader = false;
}
uint64_t readCount = this->upstreamReader->read(buf, count);
if (readCount != 0) {
return readCount;
}
// Finished one file, continue to next
this->upstreamReader->close();
this->needNewReader = true;
}
CHECK_OR_DIE(this->upstreamReader != NULL);
while (true) {
if (this->needNewReader) {
BucketContent *key = this->getNextKey();
if (key == NULL) {
S3DEBUG("Read finished for segment: %d", this->segId);
return 0;
}
this->upstreamReader->open(getReaderParams(key));
this->needNewReader = false;
}
uint64_t readCount = this->upstreamReader->read(buf, count);
if (readCount != 0) {
return readCount;
}
// Finished one file, continue to next
this->upstreamReader->close();
this->needNewReader = true;
}
}
void S3BucketReader::close() {
......@@ -101,7 +100,7 @@ void S3BucketReader::close() {
this->keyList = NULL;
}
return;
return;
}
// Set schema to 'https' or 'http'
......@@ -118,7 +117,7 @@ void S3BucketReader::SetSchema() {
}
ListBucketResult *S3BucketReader::listBucketWithRetry(int retries) {
CHECK_OR_DIE(this->s3interface != NULL);
CHECK_OR_DIE(this->s3interface != NULL);
while (retries--) {
ListBucketResult *result = this->s3interface->ListBucket(
......@@ -127,13 +126,11 @@ ListBucketResult *S3BucketReader::listBucketWithRetry(int retries) {
return result;
}
S3INFO("Can't get keylist from bucket %s, retrying ...",
this->bucket.c_str());
S3INFO("Can't get keylist from bucket %s, retrying ...", this->bucket.c_str());
}
S3ERROR("Failed to list bucket for URL: %s", this->url.c_str());
CHECK_OR_DIE_MSG(false, "Failed to list bucket with retries: %s",
this->url.c_str());
CHECK_OR_DIE_MSG(false, "Failed to list bucket with retries: %s", this->url.c_str());
// return NULL; Not needed, as CHECK_OR_DIE_MSG will return always.
}
......@@ -149,8 +146,7 @@ string S3BucketReader::getKeyURL(const string &key) {
// http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
void S3BucketReader::SetRegion() {
size_t ibegin =
this->url.find("://s3") +
strlen("://s3"); // index of character('.' or '-') after "3"
this->url.find("://s3") + strlen("://s3"); // index of character('.' or '-') after "3"
size_t iend = this->url.find(".amazonaws.com");
if (iend == string::npos) {
......@@ -171,11 +167,29 @@ void S3BucketReader::SetRegion() {
void S3BucketReader::SetBucketAndPrefix() {
size_t ibegin = find_Nth(this->url, 3, "/");
size_t iend = find_Nth(this->url, 4, "/");
if ((iend == string::npos) || (ibegin == string::npos)) {
if (ibegin == string::npos) {
return;
}
// s3://s3-region.amazonaws.com/bucket
if (iend == string::npos) {
this->bucket = url.substr(ibegin + 1, url.length() - ibegin);
this->prefix = "";
return;
}
this->bucket = this->url.substr(ibegin + 1, iend - ibegin - 1);
this->prefix = this->url.substr(iend + 1, this->url.length() - iend - 1);
this->bucket = url.substr(ibegin + 1, iend - ibegin - 1);
// s3://s3-region.amazonaws.com/bucket/
if (iend == url.length()) {
this->prefix = "";
return;
}
ibegin = find_Nth(url, 4, "/");
// s3://s3-region.amazonaws.com/bucket/prefix
// s3://s3-region.amazonaws.com/bucket/prefix/whatever
this->prefix = url.substr(ibegin + 1, url.length() - ibegin - 1);
}
void S3BucketReader::validateURL() {
......
......@@ -34,9 +34,8 @@ static string encode_query_str(const string &query) {
#define DATE_STR_LEN 9
#define TIME_STAMP_STR_LEN 17
#define SHA256_DIGEST_STRING_LENGTH 65
bool SignRequestV4(const string &method, HTTPHeaders *h,
const string &orig_region, const string &path,
const string &query, const S3Credential &cred) {
void SignRequestV4(const string &method, HTTPHeaders *h, const string &orig_region,
const string &path, const string &query, const S3Credential &cred) {
struct tm tm_info;
char date_str[DATE_STR_LEN] = {0};
char timestamp_str[TIME_STAMP_STR_LEN] = {0};
......@@ -87,37 +86,33 @@ bool SignRequestV4(const string &method, HTTPHeaders *h,
stringstream kSecret;
kSecret << "AWS4" << cred.secret;
sha256hmac(date_str, key_date, kSecret.str().c_str(),
strlen(kSecret.str().c_str()));
sha256hmac(region.c_str(), key_region, (char *)key_date,
SHA256_DIGEST_LENGTH);
sha256hmac(date_str, key_date, kSecret.str().c_str(), strlen(kSecret.str().c_str()));
sha256hmac(region.c_str(), key_region, (char *)key_date, SHA256_DIGEST_LENGTH);
sha256hmac("s3", key_service, (char *)key_region, SHA256_DIGEST_LENGTH);
sha256hmac("aws4_request", signing_key, (char *)key_service,
SHA256_DIGEST_LENGTH);
sha256hmac_hex(string2sign_str.str().c_str(), signature_hex,
(char *)signing_key, SHA256_DIGEST_LENGTH);
sha256hmac("aws4_request", signing_key, (char *)key_service, SHA256_DIGEST_LENGTH);
sha256hmac_hex(string2sign_str.str().c_str(), signature_hex, (char *)signing_key,
SHA256_DIGEST_LENGTH);
stringstream signature_header;
signature_header << "AWS4-HMAC-SHA256 Credential=" << cred.keyid << "/"
<< date_str << "/" << region << "/"
signature_header << "AWS4-HMAC-SHA256 Credential=" << cred.keyid << "/" << date_str << "/"
<< region << "/"
<< "s3"
<< "/aws4_request,SignedHeaders=" << signed_headers
<< ",Signature=" << signature_hex;
h->Add(AUTHORIZATION, signature_header.str());
return true;
return;
}
// return the number of items
uint64_t XMLParserCallback(void *contents, uint64_t size, uint64_t nmemb,
void *userp) {
uint64_t XMLParserCallback(void *contents, uint64_t size, uint64_t nmemb, void *userp) {
uint64_t realsize = size * nmemb;
struct XMLInfo *pxml = (struct XMLInfo *)userp;
if (!pxml->ctxt) {
pxml->ctxt = xmlCreatePushParserCtxt(NULL, NULL, (const char *)contents,
realsize, "resp.xml");
pxml->ctxt =
xmlCreatePushParserCtxt(NULL, NULL, (const char *)contents, realsize, "resp.xml");
} else {
xmlParseChunk(pxml->ctxt, (const char *)contents, realsize, 0);
}
......@@ -145,8 +140,7 @@ char *get_opt_s3(const char *url, const char *key) {
// construct the key to search " key="
int key_len = strlen(key);
char *key2search = (char *)malloc(key_len + 3);
CHECK_OR_DIE_MSG(key2search != NULL,
"Can not allocate %d bytes memory for key string",
CHECK_OR_DIE_MSG(key2search != NULL, "Can not allocate %d bytes memory for key string",
key_len + 3);
snprintf(key2search, key_len + 3, " %s=", key);
......@@ -163,14 +157,11 @@ char *get_opt_s3(const char *url, const char *key) {
// get the length of string "blah1"
int value_len = strlen_to_next_char(value_start, ' ');
CHECK_OR_DIE_MSG(value_len != 0, "Can not find value of %s in %s", key,
url);
CHECK_OR_DIE_MSG(value_len != 0, "Can not find value of %s in %s", key, url);
// get the string "blah1"
char *value = strndup(value_start, value_len);
CHECK_OR_DIE_MSG(value != NULL,
"Can not allocate %d bytes memory for value string",
value_len);
CHECK_OR_DIE_MSG(value != NULL, "Can not allocate %d bytes memory for value string", value_len);
return value;
}
......@@ -183,9 +174,7 @@ char *truncate_options(const char *url_with_options) {
// get the string of url
char *url = strndup(url_with_options, url_len);
CHECK_OR_DIE_MSG(url != NULL,
"Can not allocate %d bytes memory for value string",
url_len);
CHECK_OR_DIE_MSG(url != NULL, "Can not allocate %d bytes memory for value string", url_len);
return url;
}
......@@ -65,11 +65,9 @@ bool InitConfig(const string& conf_path, const string section = "default") {
Config* s3cfg = new Config(conf_path);
if (s3cfg == NULL || !s3cfg->Handle()) {
#ifndef S3_STANDALONE
write_log("Failed to parse config file \"%s\", or it doesn't exist\n",
conf_path.c_str());
write_log("Failed to parse config file \"%s\", or it doesn't exist\n", conf_path.c_str());
#else
S3ERROR("Failed to parse config file \"%s\", or it doesn't exist",
conf_path.c_str());
S3ERROR("Failed to parse config file \"%s\", or it doesn't exist", conf_path.c_str());
#endif
delete s3cfg;
return false;
......@@ -87,11 +85,9 @@ bool InitConfig(const string& conf_path, const string section = "default") {
s3ext_secret = s3cfg->Get(section.c_str(), "secret", "");
s3ext_token = s3cfg->Get(section.c_str(), "token", "");
s3ext_logserverhost =
s3cfg->Get(section.c_str(), "logserverhost", "127.0.0.1");
s3ext_logserverhost = s3cfg->Get(section.c_str(), "logserverhost", "127.0.0.1");
bool ret = s3cfg->Scan(section.c_str(), "logserverport", "%d",
&s3ext_logserverport);
bool ret = s3cfg->Scan(section.c_str(), "logserverport", "%d", &s3ext_logserverport);
if (!ret) {
s3ext_logserverport = 1111;
}
......@@ -124,15 +120,13 @@ bool InitConfig(const string& conf_path, const string section = "default") {
s3ext_chunksize = 2 * 1024 * 1024;
}
ret = s3cfg->Scan(section.c_str(), "low_speed_limit", "%d",
&s3ext_low_speed_limit);
ret = s3cfg->Scan(section.c_str(), "low_speed_limit", "%d", &s3ext_low_speed_limit);
if (!ret) {
S3INFO("The low_speed_limit is set to default value %d bytes/s", 10240);
s3ext_low_speed_limit = 10240;
}
ret = s3cfg->Scan(section.c_str(), "low_speed_time", "%d",
&s3ext_low_speed_time);
ret = s3cfg->Scan(section.c_str(), "low_speed_time", "%d", &s3ext_low_speed_time);
if (!ret) {
S3INFO("The low_speed_time is set to default value %d seconds", 60);
s3ext_low_speed_time = 60;
......
......@@ -46,8 +46,7 @@ void S3ExtBase::SetSchema() {
// http://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
void S3ExtBase::SetRegion() {
size_t ibegin =
this->url.find("://s3") +
strlen("://s3"); // index of character('.' or '-') after "3"
this->url.find("://s3") + strlen("://s3"); // index of character('.' or '-') after "3"
size_t iend = this->url.find(".amazonaws.com");
if (iend == string::npos) {
......@@ -68,11 +67,29 @@ void S3ExtBase::SetRegion() {
void S3ExtBase::SetBucketAndPrefix() {
size_t ibegin = find_Nth(this->url, 3, "/");
size_t iend = find_Nth(this->url, 4, "/");
if ((iend == string::npos) || (ibegin == string::npos)) {
if (ibegin == string::npos) {
return;
}
this->bucket = this->url.substr(ibegin + 1, iend - ibegin - 1);
this->prefix = this->url.substr(iend + 1, this->url.length() - iend - 1);
// s3://s3-region.amazonaws.com/bucket
if (iend == string::npos) {
this->bucket = url.substr(ibegin + 1, url.length() - ibegin);
this->prefix = "";
return;
}
this->bucket = url.substr(ibegin + 1, iend - ibegin - 1);
// s3://s3-region.amazonaws.com/bucket/
if (iend == url.length()) {
this->prefix = "";
return;
}
ibegin = find_Nth(url, 4, "/");
// s3://s3-region.amazonaws.com/bucket/prefix
// s3://s3-region.amazonaws.com/bucket/prefix/whatever
this->prefix = url.substr(ibegin + 1, url.length() - ibegin - 1);
}
bool S3ExtBase::ValidateURL() {
......@@ -80,6 +97,5 @@ bool S3ExtBase::ValidateURL() {
this->SetRegion();
this->SetBucketAndPrefix();
return !(this->schema.empty() || this->region.empty() ||
this->bucket.empty());
return !(this->schema.empty() || this->region.empty() || this->bucket.empty());
}
......@@ -12,6 +12,7 @@
#include "gps3ext.h"
#include "s3http_headers.h"
#include "s3log.h"
#include "s3macros.h"
#include "s3reader.h"
#include "s3url_parser.h"
#include "s3utils.h"
......@@ -19,106 +20,250 @@
#include "s3interface.h"
using std::stringstream;
S3Service::S3Service() {
// TODO Auto-generated constructor stub
}
class XMLContextHolder {
public:
XMLContextHolder(xmlParserCtxtPtr ctx) : context(ctx) {}
~XMLContextHolder() {
if (context != NULL) {
xmlFreeDoc(context->myDoc);
xmlFreeParserCtxt(context);
}
}
S3Service::~S3Service() {
// TODO Auto-generated destructor stub
}
private:
xmlParserCtxtPtr context;
};
// It is caller's responsibility to free returned memory.
ListBucketResult *S3Service::ListBucket(const string &schema,
const string &region,
const string &bucket,
const string &prefix,
const S3Credential &cred) {
// To get next up to 1000 keys.
// If marker is empty, get first 1000 then.
// S3 will return the last key as the next marker.
string marker = "";
S3Service::S3Service() : service(NULL) {}
S3Service::~S3Service() {}
// S3 requires query parameters specified alphabetically.
string S3Service::getUrl(const string &prefix, const string &schema, const string &host,
const string &bucket, const string &marker) {
stringstream url;
url << schema << "://" << host << "/" << bucket;
if (!marker.empty()) {
url << "?marker=" << marker;
}
if (!prefix.empty()) {
url << (marker.empty() ? "?" : "&") << "prefix=" << prefix;
}
return url.str();
}
HTTPHeaders S3Service::composeHTTPHeaders(const string &url, const string &marker,
const string &prefix, const string &region,
const S3Credential &cred) {
stringstream host;
host << "s3-" << region << ".amazonaws.com";
S3DEBUG("Host url is %s", host.str().c_str());
HTTPHeaders header;
header.Add(HOST, host.str());
UrlParser p(url.c_str());
header.Add(X_AMZ_CONTENT_SHA256, "UNSIGNED-PAYLOAD");
std::stringstream query;
if (!marker.empty()) {
query << "marker=" << marker;
if (!prefix.empty()) {
query << "&";
}
}
if (!prefix.empty()) {
query << "prefix=" << prefix;
}
ListBucketResult *result = new ListBucketResult();
if (!result) {
S3ERROR("Failed to allocate bucket list result");
SignRequestV4("GET", &header, region, p.Path(), query.str(), cred);
return header;
}
// require curl 7.17 higher
// http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
xmlParserCtxtPtr S3Service::getBucketXML(const string &region, const string &url,
const string &prefix, const S3Credential &cred,
const string &marker) {
HTTPHeaders header = composeHTTPHeaders(url, marker, prefix, region, cred);
std::map<string, string> empty;
Response response = service->get(url, header, empty);
if (!response.isSuccess()) {
S3ERROR("Failed to GET bucket list of '%s'", url.c_str());
return NULL;
}
stringstream url;
xmlParserCtxtPtr xmlcontext = NULL;
xmlParserCtxtPtr xmlptr =
xmlCreatePushParserCtxt(NULL, NULL, (const char *)response.getRawData().data(),
response.getRawData().size(), "resp.xml");
if (xmlptr != NULL) {
xmlParseChunk(xmlptr, "", 0, 1);
} else {
S3ERROR("Failed to create XML parser context");
}
return xmlptr;
}
do {
if (prefix != "") {
url << schema << "://" << host.str() << "/" << bucket << "?";
bool S3Service::checkAndParseBucketXML(ListBucketResult *result, xmlParserCtxtPtr xmlcontext,
string &marker) {
XMLContextHolder holder(xmlcontext);
if (marker != "") {
url << "marker=" << marker << "&";
xmlNode *rootElement = xmlDocGetRootElement(xmlcontext->myDoc);
if (rootElement == NULL) {
S3ERROR("Failed to parse returned xml of bucket list");
return false;
}
xmlNodePtr curNode = rootElement->xmlChildrenNode;
while (curNode != NULL) {
if (xmlStrcmp(curNode->name, (const xmlChar *)"Message") == 0) {
char *content = (char *)xmlNodeGetContent(curNode);
if (content != NULL) {
S3ERROR("Amazon S3 returns error \"%s\"", content);
xmlFree(content);
}
return false;
}
curNode = curNode->next;
}
// parseBucketXML will set marker for next round.
this->parseBucketXML(result, rootElement, marker);
return true;
}
void S3Service::parseBucketXML(ListBucketResult *result, xmlNode *root_element, string &marker) {
CHECK_OR_DIE((result != NULL && root_element != NULL));
url << "prefix=" << prefix;
} else {
url << schema << "://" << bucket << "." << host.str() << "?";
xmlNodePtr cur;
bool is_truncated = false;
char *content = NULL;
char *key = NULL;
char *key_size = NULL;
if (marker != "") {
url << "marker=" << marker;
cur = root_element->xmlChildrenNode;
while (cur != NULL) {
if (key) {
xmlFree(key);
key = NULL;
}
if (!xmlStrcmp(cur->name, (const xmlChar *)"IsTruncated")) {
content = (char *)xmlNodeGetContent(cur);
if (content) {
if (!strncmp(content, "true", 4)) {
is_truncated = true;
}
xmlFree(content);
}
}
xmlcontext = DoGetXML(region, url.str(), prefix, cred, marker);
if (!xmlcontext) {
S3ERROR("Failed to list bucket for %s", url.str().c_str());
delete result;
return NULL;
if (!xmlStrcmp(cur->name, (const xmlChar *)"Name")) {
content = (char *)xmlNodeGetContent(cur);
if (content) {
result->Name = content;
xmlFree(content);
}
}
xmlDocPtr doc = xmlcontext->myDoc;
xmlNode *root_element = xmlDocGetRootElement(xmlcontext->myDoc);
if (!root_element) {
S3ERROR("Failed to parse returned xml of bucket list");
delete result;
xmlFreeParserCtxt(xmlcontext);
xmlFreeDoc(doc);
return NULL;
if (!xmlStrcmp(cur->name, (const xmlChar *)"Prefix")) {
content = (char *)xmlNodeGetContent(cur);
if (content) {
result->Prefix = content;
xmlFree(content);
// content is not used anymore in this loop
content = NULL;
}
}
xmlNodePtr cur = root_element->xmlChildrenNode;
while (cur != NULL) {
if (!xmlStrcmp(cur->name, (const xmlChar *)"Message")) {
char *content = (char *)xmlNodeGetContent(cur);
if (content) {
S3ERROR("Amazon S3 returns error \"%s\"", content);
xmlFree(content);
if (!xmlStrcmp(cur->name, (const xmlChar *)"Contents")) {
xmlNodePtr contNode = cur->xmlChildrenNode;
uint64_t size = 0;
while (contNode != NULL) {
// no memleak here, every content has only one Key/Size node
if (!xmlStrcmp(contNode->name, (const xmlChar *)"Key")) {
key = (char *)xmlNodeGetContent(contNode);
}
if (!xmlStrcmp(contNode->name, (const xmlChar *)"Size")) {
key_size = (char *)xmlNodeGetContent(contNode);
// Size of S3 file is a natural number, don't worry
size = (uint64_t)atoll((const char *)key_size);
}
contNode = contNode->next;
}
if (key) {
if (size > 0) { // skip empty item
BucketContent *item = new BucketContent(key, size);
if (item) {
result->contents.push_back(item);
} else {
S3ERROR("Faild to create item for %s", key);
}
} else {
S3INFO("Size of \"%s\" is %" PRIu64 ", skip it", key, size);
}
delete result;
xmlFreeParserCtxt(xmlcontext);
xmlFreeDoc(doc);
return NULL;
}
cur = cur->next;
if (key_size) {
xmlFree(key_size);
key_size = NULL;
}
}
if (!extractContent(result, root_element, marker)) {
S3ERROR("Failed to extract key from bucket list");
cur = cur->next;
}
marker = (is_truncated && key) ? key : "";
if (key) {
xmlFree(key);
}
return;
}
// ListBucket list all keys in given bucket with given prefix.
//
// Return NULL when there is failure due to network instability or
// service unstable, so that caller could retry.
//
// Caller should delete returned object.
ListBucketResult *S3Service::ListBucket(const string &schema, const string &region,
const string &bucket, const string &prefix,
const S3Credential &cred) {
stringstream host;
host << "s3-" << region << ".amazonaws.com";
S3DEBUG("Host url is %s", host.str().c_str());
// TODO: here we have memory leak.
ListBucketResult *result = new ListBucketResult();
CHECK_OR_DIE_MSG(result != NULL, "%s", "Failed to allocate bucket list result");
string marker = "";
do { // To get next set(up to 1000) keys in one iteration.
// S3 requires query parameters specified alphabetically.
string url = this->getUrl(prefix, schema, host.str(), bucket, marker);
xmlParserCtxtPtr xmlcontext = getBucketXML(region, url, prefix, cred, marker);
if (xmlcontext == NULL) {
S3ERROR("Failed to list bucket for '%s'", url.c_str());
delete result;
xmlFreeParserCtxt(xmlcontext);
xmlFreeDoc(doc);
return NULL;
}
// clear url
url.str("");
// always cleanup
xmlFreeParserCtxt(xmlcontext);
xmlFreeDoc(doc);
xmlcontext = NULL;
} while (marker != "");
// parseBucketXML must not throw exception, otherwise result is leaked.
if (!checkAndParseBucketXML(result, xmlcontext, marker)) {
delete result;
return NULL;
}
} while (!marker.empty());
return result;
}
}
\ No newline at end of file
#include "s3keyreader.h"
S3KeyReader::S3KeyReader() {
// TODO Auto-generated constructor stub
// TODO Auto-generated constructor stub
}
S3KeyReader::~S3KeyReader() {
// TODO Auto-generated destructor stub
// TODO Auto-generated destructor stub
}
\ No newline at end of file
......@@ -36,8 +36,8 @@ void _LogMessage(const char* fmt, va_list args) {
void _send_to_remote(const char* fmt, va_list args) {
char buf[1024];
int len = vsnprintf(buf, sizeof(buf), fmt, args);
sendto(s3ext_logsock_udp, buf, len, 0,
(struct sockaddr*)&s3ext_logserveraddr, sizeof(struct sockaddr_in));
sendto(s3ext_logsock_udp, buf, len, 0, (struct sockaddr*)&s3ext_logserveraddr,
sizeof(struct sockaddr_in));
}
void LogMessage(LOGLEVEL loglevel, const char* fmt, ...) {
......
......@@ -14,15 +14,14 @@
#include "gps3ext.h"
#include "s3http_headers.h"
#include "s3log.h"
#include "s3macros.h"
#include "s3reader.h"
#include "s3url_parser.h"
#include "s3utils.h"
#include "s3macros.h"
using std::stringstream;
OffsetMgr::OffsetMgr(uint64_t m, uint64_t c)
: maxsize(m), chunksize(c), curpos(0) {
OffsetMgr::OffsetMgr(uint64_t m, uint64_t c) : maxsize(m), chunksize(c), curpos(0) {
pthread_mutex_init(&this->offset_lock, NULL);
}
......@@ -143,8 +142,7 @@ uint64_t BlockingBuffer::Fill() {
this->realsize = 0;
while (this->realsize < this->bufcap) {
if (leftlen != 0) {
readlen = this->fetchdata(offset, this->bufferdata + this->realsize,
leftlen);
readlen = this->fetchdata(offset, this->bufferdata + this->realsize, leftlen);
if (readlen == (uint64_t)-1) {
S3DEBUG("Failed to fetch data from libcurl");
} else {
......@@ -175,8 +173,7 @@ uint64_t BlockingBuffer::Fill() {
return (readlen == (uint64_t)-1) ? -1 : this->realsize;
}
BlockingBuffer *BlockingBuffer::CreateBuffer(const string &url,
const string &region, OffsetMgr *o,
BlockingBuffer *BlockingBuffer::CreateBuffer(const string &url, const string &region, OffsetMgr *o,
S3Credential *pcred) {
BlockingBuffer *ret = NULL;
if (url == "") return NULL;
......@@ -229,7 +226,7 @@ Downloader::Downloader(uint8_t part_num)
magic_bytes_num(0),
compression(S3_ZIP_NONE),
z_info(NULL) {
CHECK_OR_DIE(this->num != 0);
CHECK_OR_DIE(this->num != 0);
this->threads = (pthread_t *)malloc(num * sizeof(pthread_t));
if (this->threads)
memset((void *)this->threads, 0, num * sizeof(pthread_t));
......@@ -245,8 +242,8 @@ Downloader::Downloader(uint8_t part_num)
}
}
bool Downloader::init(const string &url, const string &region, uint64_t size,
uint64_t chunksize, S3Credential *pcred) {
bool Downloader::init(const string &url, const string &region, uint64_t size, uint64_t chunksize,
S3Credential *pcred) {
if (!this->threads || !this->buffers) {
return false;
}
......@@ -258,14 +255,13 @@ bool Downloader::init(const string &url, const string &region, uint64_t size,
}
for (int i = 0; i < this->num; i++) {
this->buffers[i] = BlockingBuffer::CreateBuffer(
url, region, o, pcred); // decide buffer according to url
this->buffers[i] =
BlockingBuffer::CreateBuffer(url, region, o, pcred); // decide buffer according to url
if (!this->buffers[i]->Init()) {
S3ERROR("Failed to init blocking buffer");
return false;
}
pthread_create(&this->threads[i], NULL, DownloadThreadfunc,
this->buffers[i]);
pthread_create(&this->threads[i], NULL, DownloadThreadfunc, this->buffers[i]);
}
readlen = 0;
......@@ -302,8 +298,8 @@ bool Downloader::get(char *data, uint64_t &len) {
// get first 4(at least 2) bytes to check if this file is compressed
BlockingBuffer *buf = buffers[this->chunkcount % this->num];
if ((this->magic_bytes_num = buf->Read(
(char *)this->magic_bytes, sizeof(this->magic_bytes))) > 1) {
if ((this->magic_bytes_num =
buf->Read((char *)this->magic_bytes, sizeof(this->magic_bytes))) > 1) {
if (!this->set_compression()) {
return false;
}
......@@ -344,8 +340,7 @@ RETRY:
// whether we need to buf->Read()
if (this->magic_bytes_num < filelen) {
tmplen +=
buf->Read(data + rest_magic_num, len - rest_magic_num);
tmplen += buf->Read(data + rest_magic_num, len - rest_magic_num);
}
}
} else {
......@@ -396,8 +391,7 @@ RETRY:
}
// no more available data to read, decompress or copy, done with this file
if ((this->readlen >= filelen) && !(zinfo->have_out - zinfo->done_out) &&
!strm->avail_in) {
if ((this->readlen >= filelen) && !(zinfo->have_out - zinfo->done_out) && !strm->avail_in) {
if (zinfo->inited) {
inflateEnd(strm);
}
......@@ -469,15 +463,13 @@ RETRY:
if (!zinfo->have_out) {
if (this->readlen < this->magic_bytes_num) {
uint64_t rest_magic_num = this->magic_bytes_num - this->readlen;
memcpy(zinfo->in, this->magic_bytes + this->readlen,
rest_magic_num);
memcpy(zinfo->in, this->magic_bytes + this->readlen, rest_magic_num);
strm->avail_in = rest_magic_num;
// whether we need to buf->Read()
if (this->magic_bytes_num < filelen) {
strm->avail_in +=
buf->Read((char *)zinfo->in + rest_magic_num,
S3_ZIP_CHUNKSIZE - rest_magic_num);
strm->avail_in += buf->Read((char *)zinfo->in + rest_magic_num,
S3_ZIP_CHUNKSIZE - rest_magic_num);
}
} else {
strm->avail_in = buf->Read((char *)zinfo->in, S3_ZIP_CHUNKSIZE);
......@@ -550,8 +542,7 @@ Downloader::~Downloader() {
}
// return the number of items
static uint64_t WriterCallback(void *contents, uint64_t size, uint64_t nmemb,
void *userp) {
static uint64_t WriterCallback(void *contents, uint64_t size, uint64_t nmemb, void *userp) {
uint64_t realsize = size * nmemb;
Bufinfo *p = reinterpret_cast<Bufinfo *>(userp);
......@@ -616,8 +607,7 @@ uint64_t HTTPFetcher::fetchdata(uint64_t offset, char *data, uint64_t len) {
char rangebuf[128] = {0};
long respcode;
snprintf(rangebuf, 128, "bytes=%" PRIu64 "-%" PRIu64, offset,
offset + len - 1);
snprintf(rangebuf, 128, "bytes=%" PRIu64 "-%" PRIu64, offset, offset + len - 1);
while (retry_time--) {
// "Don't call cleanup() if you intend to transfer more files, re-using
......@@ -633,17 +623,12 @@ uint64_t HTTPFetcher::fetchdata(uint64_t offset, char *data, uint64_t len) {
curl_easy_setopt(curl_handle, CURLOPT_WRITEDATA, (void *)&bi);
// consider low speed as timeout
curl_easy_setopt(curl_handle, CURLOPT_LOW_SPEED_LIMIT,
s3ext_low_speed_limit);
curl_easy_setopt(curl_handle, CURLOPT_LOW_SPEED_TIME,
s3ext_low_speed_time);
curl_easy_setopt(curl_handle, CURLOPT_LOW_SPEED_LIMIT, s3ext_low_speed_limit);
curl_easy_setopt(curl_handle, CURLOPT_LOW_SPEED_TIME, s3ext_low_speed_time);
this->AddHeaderField(RANGE, rangebuf);
this->AddHeaderField(X_AMZ_CONTENT_SHA256, "UNSIGNED-PAYLOAD");
if (!this->processheader()) {
S3ERROR("Failed to sign while fetching data, retry");
continue;
}
this->signHeader();
this->headers.FreeList();
this->headers.CreateList();
......@@ -669,14 +654,13 @@ uint64_t HTTPFetcher::fetchdata(uint64_t offset, char *data, uint64_t len) {
}
if (res != CURLE_OK) {
S3ERROR("curl_easy_perform() failed: %s, retry",
curl_easy_strerror(res));
S3ERROR("curl_easy_perform() failed: %s, retry", curl_easy_strerror(res));
bi.len = -1;
continue;
} else {
curl_easy_getinfo(curl_handle, CURLINFO_RESPONSE_CODE, &respcode);
S3DEBUG("Fetched %llu, %llu - %llu, response code is %ld", len,
offset, offset + len - 1, respcode);
S3DEBUG("Fetched %llu, %llu - %llu, response code is %ld", len, offset,
offset + len - 1, respcode);
if ((respcode != 200) && (respcode != 206)) {
S3ERROR("get %.*s, retry", (int)bi.len, data);
......@@ -704,296 +688,8 @@ S3Fetcher::S3Fetcher(const string &url, const string &region, OffsetMgr *o,
this->region = region;
}
bool S3Fetcher::processheader() {
return SignRequestV4("GET", &this->headers, this->region,
this->urlparser.Path(), "", this->cred);
}
// CreateBucketContentItem
BucketContent::~BucketContent() {}
BucketContent::BucketContent() : name(""), size(0) {}
BucketContent *CreateBucketContentItem(const string &key, uint64_t size) {
if (key == "") return NULL;
BucketContent *ret = new BucketContent();
if (!ret) {
S3ERROR("Can't create bucket list, no enough memory?");
return NULL;
}
ret->name = key;
ret->size = size;
return ret;
}
// require curl 7.17 higher
// http://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketGET.html
xmlParserCtxtPtr DoGetXML(const string &region, const string &url,
const string &prefix, const S3Credential &cred,
const string &marker) {
stringstream host;
host << "s3-" << region << ".amazonaws.com";
CURL *curl = curl_easy_init();
if (curl) {
curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
#if DEBUG_S3_CURL
curl_easy_setopt(curl, CURLOPT_VERBOSE, 1L);
#endif
curl_easy_setopt(curl, CURLOPT_FORBID_REUSE, 1L);
} else {
S3ERROR("Can't create curl instance, no enough memory?");
return NULL;
}
XMLInfo xml;
xml.ctxt = NULL;
curl_easy_setopt(curl, CURLOPT_WRITEDATA, (void *)&xml);
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, XMLParserCallback);
HTTPHeaders *header = new HTTPHeaders();
if (!header) {
S3ERROR("Can allocate memory for header");
return NULL;
}
header->Add(HOST, host.str());
UrlParser p(url.c_str());
header->Add(X_AMZ_CONTENT_SHA256, "UNSIGNED-PAYLOAD");
std::stringstream query;
if (marker != "") {
query << "marker=" << marker << "&";
}
query << "prefix=" << prefix;
if (!SignRequestV4("GET", header, region, p.Path(), query.str(), cred)) {
S3ERROR("Failed to sign in DoGetXML()");
delete header;
return NULL;
}
header->CreateList();
struct curl_slist *chunk = header->GetList();
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, chunk);
CURLcode res = curl_easy_perform(curl);
if (res != CURLE_OK) {
S3ERROR("curl_easy_perform() failed: %s", curl_easy_strerror(res));
if (xml.ctxt) {
xmlDocPtr doc = xml.ctxt->myDoc;
xmlFreeParserCtxt(xml.ctxt);
xmlFreeDoc(doc);
xml.ctxt = NULL;
}
} else {
if (xml.ctxt) {
xmlParseChunk(xml.ctxt, "", 0, 1);
} else {
S3ERROR("XML is downloaded but failed to be parsed");
}
}
curl_easy_cleanup(curl);
header->FreeList();
delete header;
return xml.ctxt;
}
bool extractContent(ListBucketResult *result, xmlNode *root_element,
string &marker) {
if (!result || !root_element) {
return false;
}
xmlNodePtr cur;
bool is_truncated = false;
char *content = NULL;
char *key = NULL;
char *key_size = NULL;
cur = root_element->xmlChildrenNode;
while (cur != NULL) {
if (key) {
xmlFree(key);
key = NULL;
}
if (!xmlStrcmp(cur->name, (const xmlChar *)"IsTruncated")) {
content = (char *)xmlNodeGetContent(cur);
if (content) {
if (!strncmp(content, "true", 4)) {
is_truncated = true;
}
xmlFree(content);
}
}
if (!xmlStrcmp(cur->name, (const xmlChar *)"Name")) {
content = (char *)xmlNodeGetContent(cur);
if (content) {
result->Name = content;
xmlFree(content);
}
}
if (!xmlStrcmp(cur->name, (const xmlChar *)"Prefix")) {
content = (char *)xmlNodeGetContent(cur);
if (content) {
result->Prefix = content;
xmlFree(content);
// content is not used anymore in this loop
content = NULL;
}
}
if (!xmlStrcmp(cur->name, (const xmlChar *)"Contents")) {
xmlNodePtr contNode = cur->xmlChildrenNode;
uint64_t size = 0;
while (contNode != NULL) {
// no memleak here, every content has only one Key/Size node
if (!xmlStrcmp(contNode->name, (const xmlChar *)"Key")) {
key = (char *)xmlNodeGetContent(contNode);
}
if (!xmlStrcmp(contNode->name, (const xmlChar *)"Size")) {
key_size = (char *)xmlNodeGetContent(contNode);
// Size of S3 file is a natural number, don't worry
size = (uint64_t)atoll((const char *)key_size);
}
contNode = contNode->next;
}
if (key) {
if (size > 0) { // skip empty item
BucketContent *item = CreateBucketContentItem(key, size);
if (item) {
result->contents.push_back(item);
} else {
S3ERROR("Faild to create item for %s", key);
}
} else {
S3INFO("Size of \"%s\" is %" PRIu64 ", skip it", key, size);
}
}
if (key_size) {
xmlFree(key_size);
key_size = NULL;
}
}
cur = cur->next;
}
marker = (is_truncated && key) ? key : "";
if (key) {
xmlFree(key);
}
return true;
}
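// For reference (not part of this commit), extractContent() above walks a
// ListBucketResult document shaped roughly like the following: <Name> and <Prefix>
// fill the result, every non-empty <Contents> entry becomes one BucketContent, and
// when <IsTruncated> is "true" the last <Key> seen is returned as the next marker.
//
//   <?xml version="1.0" encoding="UTF-8"?>
//   <ListBucketResult>
//     <Name>s3test.pivotal.io</Name>
//     <Prefix>dataset1/</Prefix>
//     <IsTruncated>true</IsTruncated>
//     <Contents>
//       <Key>dataset1/normal</Key>
//       <Size>456</Size>
//     </Contents>
//   </ListBucketResult>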
// It is the caller's responsibility to free the returned memory.
ListBucketResult *ListBucket(const string &schema, const string &region,
const string &bucket, const string &prefix,
const S3Credential &cred) {
// Each request returns up to the next 1000 keys.
// If the marker is empty, the listing starts from the first 1000 keys.
// S3 returns the last key as the marker for the next request.
// (A caller sketch for ListBucket() follows this file's diff below.)
string marker = "";
stringstream host;
host << "s3-" << region << ".amazonaws.com";
S3DEBUG("Host url is %s", host.str().c_str());
ListBucketResult *result = new ListBucketResult();
if (!result) {
S3ERROR("Failed to allocate bucket list result");
return NULL;
}
stringstream url;
xmlParserCtxtPtr xmlcontext = NULL;
do {
if (prefix != "") {
url << schema << "://" << host.str() << "/" << bucket << "?";
if (marker != "") {
url << "marker=" << marker << "&";
}
url << "prefix=" << prefix;
} else {
url << schema << "://" << bucket << "." << host.str() << "?";
if (marker != "") {
url << "marker=" << marker;
}
}
xmlcontext = DoGetXML(region, url.str(), prefix, cred, marker);
if (!xmlcontext) {
S3ERROR("Failed to list bucket for %s", url.str().c_str());
delete result;
return NULL;
}
xmlDocPtr doc = xmlcontext->myDoc;
xmlNode *root_element = xmlDocGetRootElement(xmlcontext->myDoc);
if (!root_element) {
S3ERROR("Failed to parse returned xml of bucket list");
delete result;
xmlFreeParserCtxt(xmlcontext);
xmlFreeDoc(doc);
return NULL;
}
xmlNodePtr cur = root_element->xmlChildrenNode;
while (cur != NULL) {
if (!xmlStrcmp(cur->name, (const xmlChar *)"Message")) {
char *content = (char *)xmlNodeGetContent(cur);
if (content) {
S3ERROR("Amazon S3 returns error \"%s\"", content);
xmlFree(content);
}
delete result;
xmlFreeParserCtxt(xmlcontext);
xmlFreeDoc(doc);
return NULL;
}
cur = cur->next;
}
if (!extractContent(result, root_element, marker)) {
S3ERROR("Failed to extract key from bucket list");
delete result;
xmlFreeParserCtxt(xmlcontext);
xmlFreeDoc(doc);
return NULL;
}
// clear url
url.str("");
// always cleanup
xmlFreeParserCtxt(xmlcontext);
xmlFreeDoc(doc);
xmlcontext = NULL;
} while (marker != "");
return result;
void S3Fetcher::signHeader() {
SignRequestV4("GET", &this->headers, this->region, this->urlparser.Path(), "", this->cred);
}
ListBucketResult::~ListBucketResult() {
......
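// A minimal caller sketch for ListBucket() (not part of this commit). It assumes,
// per the comment above ListBucket(), that the caller owns the returned list, that
// contents holds the BucketContent pointers filled in by extractContent(), and that
// ~ListBucketResult() releases those items.
static void listBucketCallerSketch(const S3Credential &cred) {
    ListBucketResult *list =
        ListBucket("https", "us-west-2", "s3test.pivotal.io", "dataset1/normal", cred);
    if (!list) {
        S3ERROR("Failed to list bucket");
        return;
    }
    for (vector<BucketContent *>::iterator it = list->contents.begin();
         it != list->contents.end(); it++) {
        S3DEBUG("key %s, size %" PRIu64, (*it)->name.c_str(), (*it)->size);
    }
    delete list;
}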
#include <unistd.h>
#define __STDC_FORMAT_MACROS
#include <curl/curl.h>
#include <inttypes.h>
#include <map>
#include <string>
#include "s3http_headers.h"
#include "s3log.h"
#include "s3macros.h"
#include "s3restul_service.h"
using namespace std;
S3RESTfulService::S3RESTfulService() {}
S3RESTfulService::~S3RESTfulService() {}
size_t RESTfulServiceCallback(char *ptr, size_t size, size_t nmemb, void *userp) {
size_t realsize = size * nmemb;
Response *resp = (Response *)userp;
resp->appendBuffer(ptr, realsize);
return realsize;
}
Response S3RESTfulService::get(const string &url, HTTPHeaders &headers,
const map<string, string> &params) {
Response response;
CURL *curl = curl_easy_init();
CHECK_OR_DIE_MSG(curl != NULL, "%s", "Failed to create curl handler");
headers.CreateList();
curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
curl_easy_setopt(curl, CURLOPT_FORBID_REUSE, 1L);
curl_easy_setopt(curl, CURLOPT_HTTPHEADER, headers.GetList());
curl_easy_setopt(curl, CURLOPT_WRITEDATA, (void *)&response);
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, RESTfulServiceCallback);
map<string, string>::const_iterator iter = params.find("debug");
if (iter != params.end() && iter->second == "true") {
curl_easy_setopt(curl, CURLOPT_VERBOSE, 1L);
}
#if DEBUG_S3_CURL
curl_easy_setopt(curl, CURLOPT_VERBOSE, 1L);
#endif
CURLcode res = curl_easy_perform(curl);
if (res != CURLE_OK) {
S3ERROR("curl_easy_perform() failed: %s", curl_easy_strerror(res));
response.clearBuffer();
response.setStatus(FAIL);
response.setMessage(
string("failed to talk to s3 service ").append(curl_easy_strerror(res)));
} else {
response.setStatus(OK);
response.setMessage("Success");
}
curl_easy_cleanup(curl);
headers.FreeList();
return response;
}
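// A minimal usage sketch for the GET wrapper above (not part of this commit).
// Signing is omitted here; real callers are expected to sign the headers first,
// as the HTTPFetcher/ListBucket paths do. The "debug" entry in params is optional
// and only toggles CURLOPT_VERBOSE, as shown above.
static void restfulGetSketch() {
    S3RESTfulService service;
    HTTPHeaders headers;
    map<string, string> params;
    headers.Add(HOST, "s3-us-west-2.amazonaws.com");
    headers.Add(X_AMZ_CONTENT_SHA256, "UNSIGNED-PAYLOAD");
    Response resp = service.get(
        "https://s3-us-west-2.amazonaws.com/s3test.pivotal.io?prefix=dataset1/normal",
        headers, params);
    if (resp.getStatus() == OK) {
        // getRawData() is assumed to expose the bytes appended by
        // RESTfulServiceCallback(); the unit tests below use it the same way.
        S3DEBUG("fetched %lu bytes", (unsigned long)resp.getRawData().size());
    } else {
        S3ERROR("GET failed: %s", resp.getMessage().c_str());
    }
}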
......@@ -23,8 +23,7 @@ static unsigned long id_function(void) { return ((unsigned long)THREAD_ID); }
int thread_setup(void) {
int i;
mutex_buf =
(pthread_mutex_t *)malloc(CRYPTO_num_locks() * sizeof(MUTEX_TYPE));
mutex_buf = (pthread_mutex_t *)malloc(CRYPTO_num_locks() * sizeof(MUTEX_TYPE));
if (!mutex_buf) return 0;
for (i = 0; i < CRYPTO_num_locks(); i++) MUTEX_SETUP(mutex_buf[i]);
CRYPTO_set_id_callback(id_function);
......
......@@ -23,8 +23,8 @@ struct debug_data {
char trace_ascii; /* 1 or 0 */
};
static void dump_debug_data(const char *text, FILE *stream, unsigned char *ptr,
size_t size, char nohex) {
static void dump_debug_data(const char *text, FILE *stream, unsigned char *ptr, size_t size,
char nohex) {
size_t i;
size_t c;
......@@ -33,8 +33,7 @@ static void dump_debug_data(const char *text, FILE *stream, unsigned char *ptr,
if (nohex) /* without the hex output, we can fit more on screen */
width = 0x40;
fprintf(stream, "%s, %10.10ld bytes (0x%8.8lx)\n", text, (long)size,
(long)size);
fprintf(stream, "%s, %10.10ld bytes (0x%8.8lx)\n", text, (long)size, (long)size);
for (i = 0; i < size; i += width) {
fprintf(stream, "%4.4lx: ", (long)i);
......@@ -51,17 +50,13 @@ static void dump_debug_data(const char *text, FILE *stream, unsigned char *ptr,
for (c = 0; (c < width) && (i + c < size); c++) {
/* check for 0D0A; if found, skip past and start a new line of
* output */
if (nohex && (i + c + 1 < size) && ptr[i + c] == 0x0D &&
ptr[i + c + 1] == 0x0A) {
if (nohex && (i + c + 1 < size) && ptr[i + c] == 0x0D && ptr[i + c + 1] == 0x0A) {
i += (c + 2 - width);
break;
}
fprintf(stream, "%c", (ptr[i + c] >= 0x20) && (ptr[i + c] < 0x80)
? ptr[i + c]
: '.');
fprintf(stream, "%c", (ptr[i + c] >= 0x20) && (ptr[i + c] < 0x80) ? ptr[i + c] : '.');
/* check again for 0D0A, to avoid an extra \n if it's at width */
if (nohex && (i + c + 2 < size) && ptr[i + c + 1] == 0x0D &&
ptr[i + c + 2] == 0x0A) {
if (nohex && (i + c + 2 < size) && ptr[i + c + 1] == 0x0D && ptr[i + c + 2] == 0x0A) {
i += (c + 3 - width);
break;
}
......@@ -71,8 +66,8 @@ static void dump_debug_data(const char *text, FILE *stream, unsigned char *ptr,
fflush(stream);
}
static int trace_debug_data(CURL *handle, curl_infotype type, char *data,
size_t size, void *userp) {
static int trace_debug_data(CURL *handle, curl_infotype type, char *data, size_t size,
void *userp) {
struct debug_data *config = (struct debug_data *)userp;
const char *text;
(void)handle; /* prevent compiler warning */
......@@ -103,8 +98,7 @@ static int trace_debug_data(CURL *handle, curl_infotype type, char *data,
break;
}
dump_debug_data(text, stderr, (unsigned char *)data, size,
config->trace_ascii);
dump_debug_data(text, stderr, (unsigned char *)data, size, config->trace_ascii);
return 0;
}
#endif
......@@ -115,12 +109,10 @@ struct MemoryData {
};
// return the number of items
static size_t mem_read_callback(void *ptr, size_t size, size_t nmemb,
void *userp) {
static size_t mem_read_callback(void *ptr, size_t size, size_t nmemb, void *userp) {
struct MemoryData *puppet = (struct MemoryData *)userp;
size_t realsize = size * nmemb;
size_t nmemb2read =
realsize < puppet->sizeleft ? nmemb : (puppet->sizeleft / size);
size_t nmemb2read = realsize < puppet->sizeleft ? nmemb : (puppet->sizeleft / size);
size_t n2read = nmemb2read * size;
// printf("n2read = %d, nmemb2read = %d, realsize = %d, puppet->sizeleft =
......@@ -136,8 +128,7 @@ static size_t mem_read_callback(void *ptr, size_t size, size_t nmemb,
}
// return the number of items
static size_t header_write_callback(void *contents, size_t size, size_t nmemb,
void *userp) {
static size_t header_write_callback(void *contents, size_t size, size_t nmemb, void *userp) {
size_t realsize = size * nmemb;
struct MemoryData *puppet = (struct MemoryData *)userp;
......@@ -157,8 +148,8 @@ static size_t header_write_callback(void *contents, size_t size, size_t nmemb,
}
// XXX need free
const char *GetUploadId(const char *host, const char *bucket,
const char *obj_name, const S3Credential &cred) {
const char *GetUploadId(const char *host, const char *bucket, const char *obj_name,
const S3Credential &cred) {
// POST /ObjectName?uploads HTTP/1.1
// Host: BucketName.s3.amazonaws.com
// Date: date
......@@ -222,8 +213,7 @@ const char *GetUploadId(const char *host, const char *bucket,
CURLcode res = curl_easy_perform(curl);
if (res != CURLE_OK)
fprintf(stderr, "curl_easy_perform() failed: %s\n",
curl_easy_strerror(res));
fprintf(stderr, "curl_easy_perform() failed: %s\n", curl_easy_strerror(res));
xmlParseChunk(xml.ctxt, "", 0, 1);
header->FreeList();
......@@ -257,9 +247,8 @@ const char *GetUploadId(const char *host, const char *bucket,
}
// XXX need free
const char *PartPutS3Object(const char *host, const char *bucket,
const char *obj_name, const S3Credential &cred,
const char *data, uint64_t data_size,
const char *PartPutS3Object(const char *host, const char *bucket, const char *obj_name,
const S3Credential &cred, const char *data, uint64_t data_size,
uint64_t part_number, const char *upload_id) {
std::stringstream url;
std::stringstream path_with_query;
......@@ -298,8 +287,7 @@ const char *PartPutS3Object(const char *host, const char *bucket,
header->Add(CONTENTTYPE, "text/plain");
header->Add(CONTENTLENGTH, std::to_string(data_size));
UrlParser p(url.str().c_str());
path_with_query << p.Path() << "?partNumber=" << part_number
<< "&uploadId=" << upload_id;
path_with_query << p.Path() << "?partNumber=" << part_number << "&uploadId=" << upload_id;
SignPUTv2(header, path_with_query.str(), cred);
CURL *curl = curl_easy_init();
......@@ -345,8 +333,7 @@ const char *PartPutS3Object(const char *host, const char *bucket,
CURLcode res = curl_easy_perform(curl);
if (res != CURLE_OK)
fprintf(stderr, "curl_easy_perform() failed: %s\n",
curl_easy_strerror(res));
fprintf(stderr, "curl_easy_perform() failed: %s\n", curl_easy_strerror(res));
// to get the Etag from response
// HTTP/1.1 200 OK
......@@ -425,9 +412,8 @@ const char *PartPutS3Object(const char *host, const char *bucket,
return NULL;
}
bool CompleteMultiPutS3(const char *host, const char *bucket,
const char *obj_name, const char *upload_id,
const char **etag_array, uint64_t count,
bool CompleteMultiPutS3(const char *host, const char *bucket, const char *obj_name,
const char *upload_id, const char **etag_array, uint64_t count,
const S3Credential &cred) {
std::stringstream url;
std::stringstream path_with_query;
......@@ -471,9 +457,8 @@ bool CompleteMultiPutS3(const char *host, const char *bucket,
body << "<CompleteMultipartUpload>\n";
for (uint64_t i = 0; i < count; ++i) {
body << " <Part>\n <PartNumber>" << i + 1
<< "</PartNumber>\n <ETag>" << etag_array[i]
<< "</ETag>\n </Part>\n";
body << " <Part>\n <PartNumber>" << i + 1 << "</PartNumber>\n <ETag>"
<< etag_array[i] << "</ETag>\n </Part>\n";
}
body << "</CompleteMultipartUpload>";
......@@ -534,8 +519,7 @@ bool CompleteMultiPutS3(const char *host, const char *bucket,
CURLcode res = curl_easy_perform(curl);
if (res != CURLE_OK)
fprintf(stderr, "curl_easy_perform() failed: %s\n",
curl_easy_strerror(res));
fprintf(stderr, "curl_easy_perform() failed: %s\n", curl_easy_strerror(res));
// HTTP/1.1 200 OK
// x-amz-id-2: Uuag1LuByRx9e6j5Onimru9pO4ZVKnJ2Qz7/C1NPcfTWAtRPfTaOFg==
......
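// A sketch (not part of this commit) of how the three multipart helpers above fit
// together: request an upload id, PUT each part, then complete the upload with the
// collected ETags. Per the "XXX need free" notes, the returned C strings are
// assumed to be heap-allocated and are released with free() here.
static bool multipartUploadSketch(const char *host, const char *bucket, const char *obj_name,
                                  const char *data, uint64_t data_size,
                                  const S3Credential &cred) {
    const char *upload_id = GetUploadId(host, bucket, obj_name, cred);
    if (!upload_id) return false;

    // single-part example; a real caller would slice data and loop over part numbers
    const char *etag =
        PartPutS3Object(host, bucket, obj_name, cred, data, data_size, 1, upload_id);
    if (!etag) {
        free((void *)upload_id);
        return false;
    }

    const char *etag_array[1] = {etag};
    bool done = CompleteMultiPutS3(host, bucket, obj_name, upload_id, etag_array, 1, cred);

    free((void *)etag);
    free((void *)upload_id);
    return done;
}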
......@@ -24,14 +24,11 @@ UrlParser::UrlParser(const char *url) {
this->fullurl = NULL;
this->fullurl = strdup(url);
CHECK_OR_DIE_MSG(this->fullurl != NULL, "%s",
"Could not allocate memory for fullurl");
CHECK_OR_DIE_MSG(this->fullurl != NULL, "%s", "Could not allocate memory for fullurl");
struct http_parser_url url_parser;
int result = http_parser_parse_url(this->fullurl, strlen(this->fullurl),
false, &url_parser);
CHECK_OR_DIE_MSG(result == 0, "Failed to parse URL %s at field %d",
this->fullurl, result);
int result = http_parser_parse_url(this->fullurl, strlen(this->fullurl), false, &url_parser);
CHECK_OR_DIE_MSG(result == 0, "Failed to parse URL %s at field %d", this->fullurl, result);
this->schema = extractField(&url_parser, UF_SCHEMA);
this->host = extractField(&url_parser, UF_HOST);
......@@ -50,12 +47,10 @@ UrlParser::~UrlParser() {
this->fullurl = NULL;
}
char *UrlParser::extractField(const struct http_parser_url *url_parser,
http_parser_url_fields i) {
char *UrlParser::extractField(const struct http_parser_url *url_parser, http_parser_url_fields i) {
if ((url_parser->field_set & (1 << i)) == 0) {
return NULL;
}
return strndup(this->fullurl + url_parser->field_data[i].off,
url_parser->field_data[i].len);
return strndup(this->fullurl + url_parser->field_data[i].off, url_parser->field_data[i].len);
}
......@@ -56,8 +56,7 @@ bool trim(char *out, const char *in, const char *trimed) {
targetlen = strlen(in);
while (targetlen > 0) {
if (strchr(trimed, in[targetlen - 1]) ==
NULL) // can't find stripped char
if (strchr(trimed, in[targetlen - 1]) == NULL) // can't find stripped char
break;
else
targetlen--;
......@@ -78,8 +77,7 @@ bool trim(char *out, const char *in, const char *trimed) {
}
// does not return the usual hex string; the output may contain '\0'
bool sha1hmac(const char *str, unsigned char out_hash[20], const char *secret,
int secret_len) {
bool sha1hmac(const char *str, unsigned char out_hash[20], const char *secret, int secret_len) {
if (!str) return false;
unsigned int len = 32;
......@@ -95,8 +93,7 @@ bool sha1hmac(const char *str, unsigned char out_hash[20], const char *secret,
return true;
}
bool sha1hmac_hex(const char *str, char out_hash_hex[41], const char *secret,
int secret_len) {
bool sha1hmac_hex(const char *str, char out_hash_hex[41], const char *secret, int secret_len) {
if (!str) return false;
unsigned char hash[20];
......@@ -139,8 +136,7 @@ bool sha256_hex(const char *string, char out_hash_hex[65]) {
return true;
}
bool sha256hmac(const char *str, unsigned char out_hash[32], const char *secret,
int secret_len) {
bool sha256hmac(const char *str, unsigned char out_hash[32], const char *secret, int secret_len) {
if (!str) return false;
unsigned int len = 32;
......@@ -156,8 +152,7 @@ bool sha256hmac(const char *str, unsigned char out_hash[32], const char *secret,
return true;
}
bool sha256hmac_hex(const char *str, char out_hash_hex[65], const char *secret,
int secret_len) {
bool sha256hmac_hex(const char *str, char out_hash_hex[65], const char *secret, int secret_len) {
if (!str) return false;
unsigned char hash[SHA256_DIGEST_LENGTH]; // 32
......@@ -227,8 +222,7 @@ const char *MD5Calc::Get() {
MD5_Final(this->md5, &c);
std::stringstream ss;
for (int i = 0; i < 16; i++)
ss << std::hex << std::setw(2) << std::setfill('0')
<< (int)this->md5[i];
ss << std::hex << std::setw(2) << std::setfill('0') << (int)this->md5[i];
this->result = ss.str();
// Reset MD5 context
......@@ -271,8 +265,7 @@ Config::~Config() {
if (this->_conf) ini_free(this->_conf);
}
string Config::Get(const string &sec, const string &key,
const string &defaultvalue) {
string Config::Get(const string &sec, const string &key, const string &defaultvalue) {
string ret = defaultvalue;
if ((key == "") || (sec == "")) return ret;
......@@ -283,8 +276,7 @@ string Config::Get(const string &sec, const string &key,
return ret;
}
bool Config::Scan(const string &sec, const string &key, const char *scanfmt,
void *dst) {
bool Config::Scan(const string &sec, const string &key, const char *scanfmt, void *dst) {
if ((key == "") || (sec == "")) return false;
if (this->_conf) {
......@@ -295,8 +287,7 @@ bool Config::Scan(const string &sec, const string &key, const char *scanfmt,
bool to_bool(std::string str) {
std::transform(str.begin(), str.end(), str.begin(), ::tolower);
if ((str == "yes") || (str == "true") || (str == "y") || (str == "t") ||
(str == "1")) {
if ((str == "yes") || (str == "true") || (str == "y") || (str == "t") || (str == "1")) {
return true;
} else {
return false;
......
......@@ -4,55 +4,82 @@
TEST(ExtBase, ValidateURL_normal) {
S3ExtBase *myData;
myData = new S3Reader(
"s3://s3-us-west-2.amazonaws.com/s3test.pivotal.io/dataset1/normal");
myData = new S3Reader("s3://s3-us-west-2.amazonaws.com/s3test.pivotal.io/dataset1/normal");
ASSERT_TRUE(myData->ValidateURL());
EXPECT_STREQ("us-west-2", myData->get_region().c_str());
EXPECT_STREQ("s3test.pivotal.io", myData->get_bucket().c_str());
EXPECT_EQ("dataset1/normal", myData->get_prefix());
delete myData;
}
TEST(ExtBase, ValidateURL_NoPrefixAndSlash) {
S3ExtBase *myData;
myData = new S3Reader("s3://s3-us-west-2.amazonaws.com/s3test.pivotal.io");
ASSERT_TRUE(myData->ValidateURL());
EXPECT_STREQ("s3test.pivotal.io", myData->get_bucket().c_str());
EXPECT_STREQ("", myData->get_prefix().c_str());
delete myData;
}
TEST(ExtBase, ValidateURL_NoPrefix) {
S3ExtBase *myData;
myData = new S3Reader("s3://s3-us-west-2.amazonaws.com/s3test.pivotal.io/");
ASSERT_TRUE(myData->ValidateURL());
EXPECT_STREQ("s3test.pivotal.io", myData->get_bucket().c_str());
EXPECT_STREQ("", myData->get_prefix().c_str());
delete myData;
}
TEST(ExtBase, ValidateURL_default) {
S3ExtBase *myData;
myData =
new S3Reader("s3://s3.amazonaws.com/s3test.pivotal.io/dataset1/normal");
myData = new S3Reader("s3://s3.amazonaws.com/s3test.pivotal.io/dataset1/normal");
ASSERT_TRUE(myData->ValidateURL());
EXPECT_STREQ("external-1", myData->get_region().c_str());
EXPECT_STREQ("s3test.pivotal.io", myData->get_bucket().c_str());
EXPECT_STREQ("dataset1/normal", myData->get_prefix().c_str());
delete myData;
}
TEST(ExtBase, ValidateURL_useast1) {
S3ExtBase *myData;
myData = new S3Reader(
"s3://s3-us-east-1.amazonaws.com/s3test.pivotal.io/dataset1/normal");
myData = new S3Reader("s3://s3-us-east-1.amazonaws.com/s3test.pivotal.io/dataset1/normal");
ASSERT_TRUE(myData->ValidateURL());
EXPECT_STREQ("external-1", myData->get_region().c_str());
EXPECT_STREQ("s3test.pivotal.io", myData->get_bucket().c_str());
EXPECT_STREQ("dataset1/normal", myData->get_prefix().c_str());
delete myData;
}
TEST(ExtBase, ValidateURL_eucentral1) {
S3ExtBase *myData;
myData = new S3Reader(
"s3://s3.eu-central-1.amazonaws.com/s3test.pivotal.io/dataset1/normal");
myData = new S3Reader("s3://s3.eu-central-1.amazonaws.com/s3test.pivotal.io/dataset1/normal");
ASSERT_TRUE(myData->ValidateURL());
EXPECT_STREQ("eu-central-1", myData->get_region().c_str());
EXPECT_STREQ("s3test.pivotal.io", myData->get_bucket().c_str());
EXPECT_STREQ("dataset1/normal", myData->get_prefix().c_str());
delete myData;
}
TEST(ExtBase, ValidateURL_eucentral11) {
S3ExtBase *myData;
myData = new S3Reader(
"s3://s3-eu-central-1.amazonaws.com/s3test.pivotal.io/dataset1/normal");
myData = new S3Reader("s3://s3-eu-central-1.amazonaws.com/s3test.pivotal.io/dataset1/normal");
ASSERT_TRUE(myData->ValidateURL());
EXPECT_STREQ("eu-central-1", myData->get_region().c_str());
EXPECT_STREQ("s3test.pivotal.io", myData->get_bucket().c_str());
EXPECT_STREQ("dataset1/normal", myData->get_prefix().c_str());
delete myData;
}
......@@ -65,6 +92,8 @@ TEST(ExtBase, ValidateURL_apnortheast2) {
ASSERT_TRUE(myData->ValidateURL());
EXPECT_STREQ("ap-northeast-2", myData->get_region().c_str());
EXPECT_STREQ("s3test.pivotal.io", myData->get_bucket().c_str());
EXPECT_STREQ("dataset1/normal", myData->get_prefix().c_str());
delete myData;
}
......@@ -77,6 +106,8 @@ TEST(ExtBase, ValidateURL_apnortheast21) {
ASSERT_TRUE(myData->ValidateURL());
EXPECT_STREQ("ap-northeast-2", myData->get_region().c_str());
EXPECT_STREQ("s3test.pivotal.io", myData->get_bucket().c_str());
EXPECT_STREQ("dataset1/normal", myData->get_prefix().c_str());
delete myData;
}
#include "s3bucket_reader.cpp"
#include "s3interface.cpp"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "reader_params.h"
......@@ -13,39 +12,39 @@ using ::testing::_;
class MockS3Interface : public S3Interface {
public:
MOCK_METHOD5(ListBucket,
ListBucketResult*(const string& schema, const string& region,
const string& bucket, const string& prefix,
const S3Credential& cred));
ListBucketResult*(const string& schema, const string& region, const string& bucket,
const string& prefix, const S3Credential& cred));
};
class MockS3Reader : public Reader {
public:
MOCK_METHOD1(open, void(const ReaderParams& params));
MOCK_METHOD2(read, uint64_t(char *, uint64_t));
MOCK_METHOD2(read, uint64_t(char*, uint64_t));
MOCK_METHOD0(close, void());
};
// ================== S3BucketReaderTest ===================
class S3BucketReaderTest : public testing::Test {
protected:
// Remember that SetUp() is run immediately before a test starts.
virtual void SetUp() {
bucketReader = new S3BucketReader();
bucketReader->setS3interface(&s3interface);
}
// TearDown() is invoked immediately after a test finishes.
virtual void TearDown() {
bucketReader->close();
}
S3BucketReader* bucketReader;
ReaderParams params;
char buf[64];
MockS3Interface s3interface;
MockS3Reader s3reader;
protected:
// Remember that SetUp() is run immediately before a test starts.
virtual void SetUp() {
bucketReader = new S3BucketReader();
bucketReader->setS3interface(&s3interface);
}
// TearDown() is invoked immediately after a test finishes.
virtual void TearDown() {
bucketReader->close();
delete bucketReader;
}
S3BucketReader* bucketReader;
ReaderParams params;
char buf[64];
MockS3Interface s3interface;
MockS3Reader s3reader;
};
TEST_F(S3BucketReaderTest, OpenInvalidURL) {
......@@ -55,11 +54,9 @@ TEST_F(S3BucketReaderTest, OpenInvalidURL) {
}
TEST_F(S3BucketReaderTest, OpenURL) {
ListBucketResult *result = new ListBucketResult();
ListBucketResult* result = new ListBucketResult();
EXPECT_CALL(s3interface, ListBucket(_, _, _, _, _))
.Times(1)
.WillOnce(Return(result));
EXPECT_CALL(s3interface, ListBucket(_, _, _, _, _)).Times(1).WillOnce(Return(result));
string url = "https://s3-us-east-2.amazonaws.com/s3test.pivotal.io/whatever";
params.setUrl(url);
......@@ -72,121 +69,137 @@ TEST_F(S3BucketReaderTest, ListBucketWithRetryThrowException) {
}
TEST_F(S3BucketReaderTest, ListBucketWithRetryThrowExceptionWhenS3InterfaceIsNULL) {
bucketReader->setS3interface(NULL);
bucketReader->setS3interface(NULL);
EXPECT_THROW(bucketReader->listBucketWithRetry(1), std::runtime_error);
}
TEST_F(S3BucketReaderTest, ListBucketWithRetry) {
ListBucketResult *result = new ListBucketResult();
ListBucketResult* result = new ListBucketResult();
EXPECT_CALL(s3interface, ListBucket(_, _, _, _, _))
.Times(1)
.WillOnce(Return(result));
EXPECT_CALL(s3interface, ListBucket(_, _, _, _, _)).Times(1).WillOnce(Return(result));
EXPECT_NE((void*)NULL, bucketReader->listBucketWithRetry(1));
}
TEST_F(S3BucketReaderTest, ListBucketWithRetries) {
ListBucketResult *result = new ListBucketResult();
ListBucketResult* result = new ListBucketResult();
EXPECT_CALL(s3interface, ListBucket(_, _, _, _, _))
.Times(3)
.WillOnce(Return((ListBucketResult *)NULL))
.WillOnce(Return((ListBucketResult *)NULL))
.WillOnce(Return((ListBucketResult*)NULL))
.WillOnce(Return((ListBucketResult*)NULL))
.WillOnce(Return(result));
EXPECT_EQ(result, bucketReader->listBucketWithRetry(3));
}
TEST_F(S3BucketReaderTest, ReaderThrowExceptionWhenUpstreamReaderIsNULL) {
EXPECT_THROW(bucketReader->read(buf, sizeof(buf)), std::runtime_error);
EXPECT_THROW(bucketReader->read(buf, sizeof(buf)), std::runtime_error);
}
TEST_F(S3BucketReaderTest, ReaderReturnZeroForEmptyBucket) {
ListBucketResult *result = new ListBucketResult();
EXPECT_CALL(s3interface, ListBucket(_, _, _, _, _))
.Times(1)
.WillOnce(Return(result));
params.setUrl("https://s3-us-east-2.amazonaws.com/s3test.pivotal.io/whatever");
bucketReader->open(params);
bucketReader->setUpstreamReader(&s3reader);
EXPECT_EQ(0, bucketReader->read(buf, sizeof(buf)));
ListBucketResult* result = new ListBucketResult();
EXPECT_CALL(s3interface, ListBucket(_, _, _, _, _)).Times(1).WillOnce(Return(result));
params.setUrl("https://s3-us-east-2.amazonaws.com/s3test.pivotal.io/whatever");
bucketReader->open(params);
bucketReader->setUpstreamReader(&s3reader);
EXPECT_EQ(0, bucketReader->read(buf, sizeof(buf)));
}
TEST_F(S3BucketReaderTest, ReadBucketWithSingleFile) {
ListBucketResult *result = new ListBucketResult();
BucketContent* item = CreateBucketContentItem("foo", 456);
result->contents.push_back(item);
EXPECT_CALL(s3interface, ListBucket(_, _, _, _, _))
.Times(1)
.WillOnce(Return(result));
EXPECT_CALL(s3reader, read(_, _))
.Times(3)
.WillOnce(Return(256))
.WillOnce(Return(200))
.WillOnce(Return(0));
EXPECT_CALL(s3reader, open(_)).Times(1);
EXPECT_CALL(s3reader, close()).Times(1);
params.setSegId(0);
params.setSegNum(1);
params.setUrl("https://s3-us-east-2.amazonaws.com/s3test.pivotal.io/whatever");
bucketReader->open(params);
bucketReader->setUpstreamReader(&s3reader);
EXPECT_EQ(256, bucketReader->read(buf, sizeof(buf)));
EXPECT_EQ(200, bucketReader->read(buf, sizeof(buf)));
EXPECT_EQ(0, bucketReader->read(buf, sizeof(buf)));
ListBucketResult* result = new ListBucketResult();
BucketContent* item = new BucketContent("foo", 456);
result->contents.push_back(item);
EXPECT_CALL(s3interface, ListBucket(_, _, _, _, _)).Times(1).WillOnce(Return(result));
EXPECT_CALL(s3reader, read(_, _))
.Times(3)
.WillOnce(Return(256))
.WillOnce(Return(200))
.WillOnce(Return(0));
EXPECT_CALL(s3reader, open(_)).Times(1);
EXPECT_CALL(s3reader, close()).Times(1);
params.setSegId(0);
params.setSegNum(1);
params.setUrl("https://s3-us-east-2.amazonaws.com/s3test.pivotal.io/whatever");
bucketReader->open(params);
bucketReader->setUpstreamReader(&s3reader);
EXPECT_EQ(256, bucketReader->read(buf, sizeof(buf)));
EXPECT_EQ(200, bucketReader->read(buf, sizeof(buf)));
EXPECT_EQ(0, bucketReader->read(buf, sizeof(buf)));
}
TEST_F(S3BucketReaderTest, ReadBucketWithOneEmptyFileOneNonEmptyFile) {
ListBucketResult *result = new ListBucketResult();
BucketContent* item = CreateBucketContentItem("foo", 0);
result->contents.push_back(item);
item = CreateBucketContentItem("bar", 456);
result->contents.push_back(item);
EXPECT_CALL(s3interface, ListBucket(_, _, _, _, _))
.Times(1)
.WillOnce(Return(result));
EXPECT_CALL(s3reader, read(_, _))
.Times(3)
.WillOnce(Return(0))
.WillOnce(Return(256))
.WillOnce(Return(0));
EXPECT_CALL(s3reader, open(_)).Times(2);
EXPECT_CALL(s3reader, close()).Times(2);
params.setSegId(0);
params.setSegNum(1);
params.setUrl("https://s3-us-east-2.amazonaws.com/s3test.pivotal.io/whatever");
bucketReader->open(params);
bucketReader->setUpstreamReader(&s3reader);
EXPECT_EQ(256, bucketReader->read(buf, sizeof(buf)));
EXPECT_EQ(0, bucketReader->read(buf, sizeof(buf)));
ListBucketResult* result = new ListBucketResult();
BucketContent* item = new BucketContent("foo", 0);
result->contents.push_back(item);
item = new BucketContent("bar", 456);
result->contents.push_back(item);
EXPECT_CALL(s3interface, ListBucket(_, _, _, _, _)).Times(1).WillOnce(Return(result));
EXPECT_CALL(s3reader, read(_, _))
.Times(3)
.WillOnce(Return(0))
.WillOnce(Return(256))
.WillOnce(Return(0));
EXPECT_CALL(s3reader, open(_)).Times(2);
EXPECT_CALL(s3reader, close()).Times(2);
params.setSegId(0);
params.setSegNum(1);
params.setUrl("https://s3-us-east-2.amazonaws.com/s3test.pivotal.io/whatever");
bucketReader->open(params);
bucketReader->setUpstreamReader(&s3reader);
EXPECT_EQ(256, bucketReader->read(buf, sizeof(buf)));
EXPECT_EQ(0, bucketReader->read(buf, sizeof(buf)));
}
TEST_F(S3BucketReaderTest, ReaderShouldSkipIfFileIsNotForThisSegment) {
ListBucketResult *result = new ListBucketResult();
BucketContent* item = CreateBucketContentItem("foo", 456);
result->contents.push_back(item);
ListBucketResult* result = new ListBucketResult();
BucketContent* item = new BucketContent("foo", 456);
result->contents.push_back(item);
EXPECT_CALL(s3interface, ListBucket(_, _, _, _, _)).Times(1).WillOnce(Return(result));
params.setSegId(10);
params.setSegNum(16);
params.setUrl("https://s3-us-east-2.amazonaws.com/s3test.pivotal.io/whatever");
bucketReader->open(params);
bucketReader->setUpstreamReader(&s3reader);
EXPECT_EQ(0, bucketReader->read(buf, sizeof(buf)));
}
TEST_F(S3BucketReaderTest, UpstreamReaderThrowException) {
ListBucketResult* result = new ListBucketResult();
BucketContent* item = new BucketContent("foo", 0);
result->contents.push_back(item);
item = new BucketContent("bar", 456);
result->contents.push_back(item);
EXPECT_CALL(s3interface, ListBucket(_, _, _, _, _)).Times(1).WillOnce(Return(result));
EXPECT_CALL(s3reader, read(_, _))
.Times(AtLeast(1))
.WillRepeatedly(Throw(std::runtime_error("")));
EXPECT_CALL(s3interface, ListBucket(_, _, _, _, _))
.Times(1)
.WillOnce(Return(result));
EXPECT_CALL(s3reader, open(_)).Times(1);
EXPECT_CALL(s3reader, close()).Times(0);
params.setSegId(10);
params.setSegNum(16);
params.setUrl("https://s3-us-east-2.amazonaws.com/s3test.pivotal.io/whatever");
bucketReader->open(params);
bucketReader->setUpstreamReader(&s3reader);
params.setSegId(0);
params.setSegNum(1);
params.setUrl("https://s3-us-east-2.amazonaws.com/s3test.pivotal.io/whatever");
bucketReader->open(params);
bucketReader->setUpstreamReader(&s3reader);
EXPECT_EQ(0, bucketReader->read(buf, sizeof(buf)));
EXPECT_THROW(bucketReader->read(buf, sizeof(buf)), std::runtime_error);
EXPECT_THROW(bucketReader->read(buf, sizeof(buf)), std::runtime_error);
}
\ No newline at end of file
......@@ -28,111 +28,94 @@ TEST(Common, SignRequestV4) {
TEST(Common, UrlOptions) {
char *option = NULL;
EXPECT_STREQ(
"secret_test",
option = get_opt_s3("s3://neverland.amazonaws.com secret=secret_test",
"secret"));
EXPECT_STREQ("secret_test",
option = get_opt_s3("s3://neverland.amazonaws.com secret=secret_test", "secret"));
if (option) {
free(option);
option = NULL;
}
EXPECT_STREQ(
"\".\\!@#$%^&*()DFGHJK\"",
option = get_opt_s3(
"s3://neverland.amazonaws.com accessid=\".\\!@#$%^&*()DFGHJK\"",
"accessid"));
EXPECT_STREQ("\".\\!@#$%^&*()DFGHJK\"",
option = get_opt_s3(
"s3://neverland.amazonaws.com accessid=\".\\!@#$%^&*()DFGHJK\"", "accessid"));
if (option) {
free(option);
option = NULL;
}
EXPECT_STREQ(
"3456789",
option = get_opt_s3("s3://neverland.amazonaws.com chunksize=3456789",
"chunksize"));
EXPECT_STREQ("3456789", option = get_opt_s3("s3://neverland.amazonaws.com chunksize=3456789",
"chunksize"));
if (option) {
free(option);
option = NULL;
}
EXPECT_STREQ("secret_test",
option = get_opt_s3(
"s3://neverland.amazonaws.com secret=secret_test "
"accessid=\".\\!@#$%^&*()DFGHJK\" chunksize=3456789",
"secret"));
option = get_opt_s3("s3://neverland.amazonaws.com secret=secret_test "
"accessid=\".\\!@#$%^&*()DFGHJK\" chunksize=3456789",
"secret"));
if (option) {
free(option);
option = NULL;
}
EXPECT_STREQ("\".\\!@#$%^&*()DFGHJK\"",
option = get_opt_s3(
"s3://neverland.amazonaws.com secret=secret_test "
"accessid=\".\\!@#$%^&*()DFGHJK\" chunksize=3456789",
"accessid"));
option = get_opt_s3("s3://neverland.amazonaws.com secret=secret_test "
"accessid=\".\\!@#$%^&*()DFGHJK\" chunksize=3456789",
"accessid"));
if (option) {
free(option);
option = NULL;
}
EXPECT_STREQ("3456789",
option = get_opt_s3(
"s3://neverland.amazonaws.com secret=secret_test "
"accessid=\".\\!@#$%^&*()DFGHJK\" chunksize=3456789",
"chunksize"));
option = get_opt_s3("s3://neverland.amazonaws.com secret=secret_test "
"accessid=\".\\!@#$%^&*()DFGHJK\" chunksize=3456789",
"chunksize"));
if (option) {
free(option);
option = NULL;
}
EXPECT_STREQ(
"secret_test",
option = get_opt_s3("s3://neverland.amazonaws.com secret=secret_test "
"blah=whatever accessid=\".\\!@#$%^&*()DFGHJK\" "
"chunksize=3456789 KingOfTheWorld=sanpang",
"secret"));
EXPECT_STREQ("secret_test",
option = get_opt_s3("s3://neverland.amazonaws.com secret=secret_test "
"blah=whatever accessid=\".\\!@#$%^&*()DFGHJK\" "
"chunksize=3456789 KingOfTheWorld=sanpang",
"secret"));
if (option) {
free(option);
option = NULL;
}
EXPECT_STREQ(
"secret_test",
option = get_opt_s3("s3://neverland.amazonaws.com secret=secret_test "
"blah= accessid=\".\\!@#$%^&*()DFGHJK\" "
"chunksize=3456789 KingOfTheWorld=sanpang",
"secret"));
EXPECT_STREQ("secret_test",
option = get_opt_s3("s3://neverland.amazonaws.com secret=secret_test "
"blah= accessid=\".\\!@#$%^&*()DFGHJK\" "
"chunksize=3456789 KingOfTheWorld=sanpang",
"secret"));
if (option) {
free(option);
option = NULL;
}
EXPECT_STREQ(
"3456789",
option = get_opt_s3("s3://neverland.amazonaws.com secret=secret_test "
"chunksize=3456789 KingOfTheWorld=sanpang ",
"chunksize"));
EXPECT_STREQ("3456789", option = get_opt_s3("s3://neverland.amazonaws.com secret=secret_test "
"chunksize=3456789 KingOfTheWorld=sanpang ",
"chunksize"));
if (option) {
free(option);
option = NULL;
}
EXPECT_STREQ(
"3456789",
option = get_opt_s3("s3://neverland.amazonaws.com secret=secret_test "
"chunksize=3456789 KingOfTheWorld=sanpang ",
"chunksize"));
EXPECT_STREQ("3456789", option = get_opt_s3("s3://neverland.amazonaws.com secret=secret_test "
"chunksize=3456789 KingOfTheWorld=sanpang ",
"chunksize"));
if (option) {
free(option);
option = NULL;
}
EXPECT_STREQ(
"=sanpang",
option = get_opt_s3("s3://neverland.amazonaws.com secret=secret_test "
"chunksize=3456789 KingOfTheWorld==sanpang ",
"KingOfTheWorld"));
EXPECT_STREQ("=sanpang", option = get_opt_s3("s3://neverland.amazonaws.com secret=secret_test "
"chunksize=3456789 KingOfTheWorld==sanpang ",
"KingOfTheWorld"));
if (option) {
free(option);
option = NULL;
......@@ -142,8 +125,7 @@ TEST(Common, UrlOptions) {
EXPECT_THROW(get_opt_s3(NULL, "accessid"), std::runtime_error);
EXPECT_THROW(get_opt_s3("s3://neverland.amazonaws.com", "secret"),
std::runtime_error);
EXPECT_THROW(get_opt_s3("s3://neverland.amazonaws.com", "secret"), std::runtime_error);
EXPECT_THROW(get_opt_s3("s3://neverland.amazonaws.com secret=secret_test "
"blah=whatever accessid= chunksize=3456789 "
......@@ -151,17 +133,15 @@ TEST(Common, UrlOptions) {
"accessid"),
std::runtime_error);
EXPECT_THROW(
get_opt_s3("s3://neverland.amazonaws.com secret=secret_test "
"blah=whatever chunksize=3456789 KingOfTheWorld=sanpang",
""),
std::runtime_error);
EXPECT_THROW(get_opt_s3("s3://neverland.amazonaws.com secret=secret_test "
"blah=whatever chunksize=3456789 KingOfTheWorld=sanpang",
""),
std::runtime_error);
EXPECT_THROW(
get_opt_s3("s3://neverland.amazonaws.com secret=secret_test "
"blah=whatever chunksize=3456789 KingOfTheWorld=sanpang",
NULL),
std::runtime_error);
EXPECT_THROW(get_opt_s3("s3://neverland.amazonaws.com secret=secret_test "
"blah=whatever chunksize=3456789 KingOfTheWorld=sanpang",
NULL),
std::runtime_error);
EXPECT_THROW(get_opt_s3("s3://neverland.amazonaws.com secret=secret_test "
"chunksize=3456789 KingOfTheWorld=sanpang ",
......@@ -173,36 +153,33 @@ TEST(Common, TruncateOptions) {
char *truncated = NULL;
EXPECT_STREQ("s3://neverland.amazonaws.com",
truncated = truncate_options(
"s3://neverland.amazonaws.com secret=secret_test"));
truncated = truncate_options("s3://neverland.amazonaws.com secret=secret_test"));
if (truncated) {
free(truncated);
truncated = NULL;
}
EXPECT_STREQ(
"s3://neverland.amazonaws.com",
truncated = truncate_options(
"s3://neverland.amazonaws.com accessid=\".\\!@#$%^&*()DFGHJK\""));
EXPECT_STREQ("s3://neverland.amazonaws.com",
truncated = truncate_options(
"s3://neverland.amazonaws.com accessid=\".\\!@#$%^&*()DFGHJK\""));
if (truncated) {
free(truncated);
truncated = NULL;
}
EXPECT_STREQ("s3://neverland.amazonaws.com",
truncated = truncate_options(
"s3://neverland.amazonaws.com secret=secret_test "
"accessid=\".\\!@#$%^&*()DFGHJK\" chunksize=3456789"));
EXPECT_STREQ(
"s3://neverland.amazonaws.com",
truncated = truncate_options("s3://neverland.amazonaws.com secret=secret_test "
"accessid=\".\\!@#$%^&*()DFGHJK\" chunksize=3456789"));
if (truncated) {
free(truncated);
truncated = NULL;
}
EXPECT_STREQ("s3://neverland.amazonaws.com",
truncated = truncate_options(
"s3://neverland.amazonaws.com secret=secret_test "
"blah= accessid=\".\\!@#$%^&*()DFGHJK\" "
"chunksize=3456789 KingOfTheWorld=sanpang"));
truncated = truncate_options("s3://neverland.amazonaws.com secret=secret_test "
"blah= accessid=\".\\!@#$%^&*()DFGHJK\" "
"chunksize=3456789 KingOfTheWorld=sanpang"));
if (truncated) {
free(truncated);
truncated = NULL;
......
/*
* s3interface_test.cpp
*
* Created on: May 31, 2016
* Author: adam
*/
#include "../include/s3interface.h"
#include "s3interface.cpp"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "restful_service.cpp"
using ::testing::AtLeast;
using ::testing::Return;
using ::testing::Throw;
using ::testing::_;
class MockS3RESTfulService : public RESTfulService {
public:
MOCK_METHOD3(get, Response(const string &url, HTTPHeaders &headers,
const map<string, string> &params));
};
class XMLGenerator {
public:
XMLGenerator() : isTruncated(false) {}
XMLGenerator *setName(string name) {
this->name = name;
return this;
}
XMLGenerator *setPrefix(string prefix) {
this->prefix = prefix;
return this;
}
XMLGenerator *setMarker(string marker) {
this->marker = marker;
return this;
}
XMLGenerator *setIsTruncated(bool isTruncated) {
this->isTruncated = isTruncated;
return this;
}
XMLGenerator *pushBuckentContent(BucketContent content) {
this->contents.push_back(content);
return this;
}
vector<uint8_t> toXML() {
stringstream sstr;
sstr << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
<< "<ListBucketResult>"
<< "<Name>" << name << "</Name>"
<< "<Prefix>" << prefix << "</Prefix>"
<< "<Marker>" << marker << "</Marker>"
<< "<IsTruncated>" << (isTruncated ? "true" : "false") << "</IsTruncated>";
for (vector<BucketContent>::iterator it = contents.begin(); it != contents.end(); it++) {
sstr << "<Contents>"
<< "<Key>" << it->name << "</Key>"
<< "<Size>" << it->size << "</Size>"
<< "</Contents>";
}
sstr << "</ListBucketResult>";
string xml = sstr.str();
return vector<uint8_t>(xml.begin(), xml.end());
}
private:
string name;
string prefix;
string marker;
bool isTruncated;
vector<BucketContent> contents;
};
class S3ServiceTest : public testing::Test {
protected:
// Remember that SetUp() is run immediately before a test starts.
virtual void SetUp() {
s3ext_logtype = STDERR_LOG;
s3ext_loglevel = EXT_INFO;
s3service = new S3Service();
schema = "https";
s3service->setRESTfulService(&mockRestfulService);
}
// TearDown() is invoked immediately after a test finishes.
virtual void TearDown() { delete s3service; }
Response buildListBucketResponse(int numOfContent, bool isTruncated, int numOfZeroKeys = 0) {
XMLGenerator generator;
XMLGenerator *gen = &generator;
gen->setName("s3test.pivotal.io")
->setPrefix("s3files/")
->setIsTruncated(isTruncated)
->pushBuckentContent(BucketContent("s3files/", 0));
char buffer[32] = {0};
for (int i = 0; i < numOfContent; ++i) {
snprintf(buffer, 32, "files%d", i);
gen->pushBuckentContent(BucketContent(buffer, i + 1));
}
for (int i = 0; i < numOfZeroKeys; i++) {
snprintf(buffer, 32, "zerofiles%d", i);
gen->pushBuckentContent(BucketContent(buffer, 0));
}
return Response(OK, gen->toXML());
}
S3Service *s3service;
S3Credential cred;
string schema;
string region;
string bucket;
string prefix;
MockS3RESTfulService mockRestfulService;
Response response;
};
TEST_F(S3ServiceTest, ListBucketThrowExceptionWhenBucketStringIsEmpty) {
EXPECT_THROW(s3service->ListBucket("", "", "", "", cred), std::runtime_error);
}
TEST_F(S3ServiceTest, ListBucketWithWrongRegion) {
EXPECT_CALL(mockRestfulService, get(_, _, _)).WillOnce(Return(response));
EXPECT_EQ((void *)NULL, s3service->ListBucket(schema, "nonexist", "", "", cred));
}
TEST_F(S3ServiceTest, ListBucketWithWrongBucketName) {
uint8_t xml[] =
"<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
"<Error>"
"<Code>PermanentRedirect</Code>"
"<Message>The bucket you are attempting to access must be addressed "
"using the specified endpoint. "
"Please send all future requests to this endpoint.</Message>"
"<Bucket>foo</Bucket><Endpoint>s3.amazonaws.com</Endpoint>"
"<RequestId>27DD9B7004AF83E3</RequestId>"
"<HostId>NL3pyGvn+FajhQLKz/"
"hXUzV1VnFbbwNjUQsqWeFiDANkV4EVkh8Kpq5NNAi27P7XDhoA9M9Xhg0=</HostId>"
"</Error>";
vector<uint8_t> raw(xml, xml + sizeof(xml) - 1);
Response response(OK, raw);
EXPECT_CALL(mockRestfulService, get(_, _, _)).WillOnce(Return(response));
EXPECT_EQ((void *)NULL, s3service->ListBucket(schema, "us-west-2", "foo/bar", "", cred));
}
TEST_F(S3ServiceTest, ListBucketWithNormalBucket) {
XMLGenerator generator;
XMLGenerator *gen = &generator;
gen->setName("s3test.pivotal.io")
->setPrefix("threebytes/")
->setIsTruncated(false)
->pushBuckentContent(BucketContent("threebytes/", 0))
->pushBuckentContent(BucketContent("threebytes/threebytes", 3));
Response response(OK, gen->toXML());
EXPECT_CALL(mockRestfulService, get(_, _, _)).WillOnce(Return(response));
ListBucketResult *result =
s3service->ListBucket(schema, "us-west-2", "s3test.pivotal.io", "threebytes/", cred);
ASSERT_NE((void *)NULL, result);
EXPECT_EQ(1, result->contents.size());
}
TEST_F(S3ServiceTest, ListBucketWithBucketWith1000Keys) {
EXPECT_CALL(mockRestfulService, get(_, _, _))
.WillOnce(Return(this->buildListBucketResponse(1000, false)));
ListBucketResult *result =
s3service->ListBucket(schema, "us-west-2", "s3test.pivotal.io", "s3files/", cred);
ASSERT_NE((void *)NULL, result);
EXPECT_EQ(1000, result->contents.size());
}
TEST_F(S3ServiceTest, ListBucketWithBucketWith1001Keys) {
EXPECT_CALL(mockRestfulService, get(_, _, _))
.WillOnce(Return(this->buildListBucketResponse(1000, true)))
.WillOnce(Return(this->buildListBucketResponse(1, false)));
ListBucketResult *result =
s3service->ListBucket(schema, "us-west-2", "s3test.pivotal.io", "s3files/", cred);
ASSERT_NE((void *)NULL, result);
EXPECT_EQ(1001, result->contents.size());
}
TEST_F(S3ServiceTest, ListBucketWithBucketWithMoreThan1000Keys) {
EXPECT_CALL(mockRestfulService, get(_, _, _))
.WillOnce(Return(this->buildListBucketResponse(1000, true)))
.WillOnce(Return(this->buildListBucketResponse(1000, true)))
.WillOnce(Return(this->buildListBucketResponse(1000, true)))
.WillOnce(Return(this->buildListBucketResponse(1000, true)))
.WillOnce(Return(this->buildListBucketResponse(1000, true)))
.WillOnce(Return(this->buildListBucketResponse(120, false)));
ListBucketResult *result =
s3service->ListBucket(schema, "us-west-2", "s3test.pivotal.io", "s3files/", cred);
ASSERT_NE((void *)NULL, result);
EXPECT_EQ(5120, result->contents.size());
}
TEST_F(S3ServiceTest, ListBucketWithBucketWithTruncatedResponse) {
Response EmptyResponse;
EXPECT_CALL(mockRestfulService, get(_, _, _))
.WillOnce(Return(this->buildListBucketResponse(1000, true)))
.WillOnce(Return(this->buildListBucketResponse(1000, true)))
.WillOnce(Return(EmptyResponse));
ListBucketResult *result =
s3service->ListBucket(schema, "us-west-2", "s3test.pivotal.io", "s3files/", cred);
EXPECT_EQ((void *)NULL, result);
}
TEST_F(S3ServiceTest, ListBucketWithBucketWithZeroSizedKeys) {
EXPECT_CALL(mockRestfulService, get(_, _, _))
.WillOnce(Return(this->buildListBucketResponse(0, true, 8)))
.WillOnce(Return(this->buildListBucketResponse(1000, true)))
.WillOnce(Return(this->buildListBucketResponse(120, false, 8)));
ListBucketResult *result =
s3service->ListBucket(schema, "us-west-2", "s3test.pivotal.io", "s3files/", cred);
ASSERT_NE((void *)NULL, result);
EXPECT_EQ(1120, result->contents.size());
}
TEST_F(S3ServiceTest, ListBucketWithEmptyBucket) {
EXPECT_CALL(mockRestfulService, get(_, _, _))
.WillOnce(Return(this->buildListBucketResponse(0, false, 0)));
ListBucketResult *result =
s3service->ListBucket(schema, "us-west-2", "s3test.pivotal.io", "s3files/", cred);
ASSERT_NE((void *)NULL, result);
EXPECT_EQ(0, result->contents.size());
}
TEST_F(S3ServiceTest, ListBucketWithAllZeroedFilesBucket) {
EXPECT_CALL(mockRestfulService, get(_, _, _))
.WillOnce(Return(this->buildListBucketResponse(0, false, 2)));
ListBucketResult *result =
s3service->ListBucket(schema, "us-west-2", "s3test.pivotal.io", "s3files/", cred);
ASSERT_NE((void *)NULL, result);
EXPECT_EQ(0, result->contents.size());
}
TEST_F(S3ServiceTest, ListBucketWithErrorResponse) {
EXPECT_CALL(mockRestfulService, get(_, _, _)).WillOnce(Return(response));
EXPECT_EQ((void *)NULL,
s3service->ListBucket(schema, "nonexist", "s3test.pivotal.io", "s3files/", cred));
}
TEST_F(S3ServiceTest, ListBucketWithErrorReturnedXML) {
uint8_t xml[] = "whatever";
vector<uint8_t> raw(xml, xml + sizeof(xml) - 1);
Response response(OK, raw);
EXPECT_CALL(mockRestfulService, get(_, _, _)).WillOnce(Return(response));
EXPECT_EQ((void *)NULL,
s3service->ListBucket(schema, "us-west-2", "s3test.pivotal.io", "s3files/", cred));
}
TEST_F(S3ServiceTest, ListBucketWithNonRootXML) {
uint8_t xml[] = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>";
vector<uint8_t> raw(xml, xml + sizeof(xml) - 1);
Response response(OK, raw);
EXPECT_CALL(mockRestfulService, get(_, _, _)).WillOnce(Return(response));
EXPECT_EQ((void *)NULL,
s3service->ListBucket(schema, "us-west-2", "s3test.pivotal.io", "s3files/", cred));
}
\ No newline at end of file
......@@ -38,4 +38,4 @@ TEST(OffsetMgr, reset) {
EXPECT_EQ(r.offset, 333);
EXPECT_EQ(r.len, 100);
delete o;
}
}
\ No newline at end of file
#include <algorithm>
#include <fstream>
#include <iostream>
#include <vector>
#include "gtest/gtest.h"
#include "s3http_headers.h"
#include "s3restful_service.cpp"
using namespace std;
TEST(S3RESTfulService, GetWithWrongHeader) {
HTTPHeaders headers;
map<string, string> params;
S3RESTfulService service;
string url = "https://www.bing.com/";
headers.Add(HOST, url);
headers.Add(CONTENTTYPE, "plain/text");
Response resp = service.get(url, headers, params);
EXPECT_EQ(OK, resp.getStatus());
EXPECT_EQ("Success", resp.getMessage());
// 302 returned from bing.com
EXPECT_EQ(true, resp.getRawData().size() < 500);
}
TEST(S3RESTfulService, DISABLED_GetWithEmptyHeader) {
HTTPHeaders headers;
map<string, string> params;
string url;
S3RESTfulService service;
url = "https://www.bing.com/";
Response resp = service.get(url, headers, params);
EXPECT_EQ(OK, resp.getStatus());
EXPECT_EQ("Success", resp.getMessage());
EXPECT_EQ(true, resp.getRawData().size() > 10000);
}
TEST(S3RESTfulService, GetWithoutURL) {
HTTPHeaders headers;
map<string, string> params;
string url;
S3RESTfulService service;
Response resp = service.get(url, headers, params);
EXPECT_EQ(FAIL, resp.getStatus());
EXPECT_EQ("failed to talk to s3 service URL using bad/illegal format or missing URL",
resp.getMessage());
}
TEST(S3RESTfulService, GetWithWrongURL) {
HTTPHeaders headers;
map<string, string> params;
string url;
S3RESTfulService service;
url = "https://www.bing.com/pivotal.html";
Response resp = service.get(url, headers, params);
EXPECT_EQ(OK, resp.getStatus());
EXPECT_EQ("Success", resp.getMessage());
// 404 returned from bing.com
EXPECT_EQ(0, resp.getRawData().size());
}
\ No newline at end of file
......@@ -3,8 +3,7 @@
#include "s3macros.h"
TEST(UrlParser, Routine) {
UrlParser *p = new UrlParser(
"https://www.google.com/search?sclient=psy-ab&site=&source=hp");
UrlParser *p = new UrlParser("https://www.google.com/search?sclient=psy-ab&site=&source=hp");
ASSERT_NE((void *)NULL, p);
EXPECT_STREQ("https", p->Schema());
......
......@@ -48,9 +48,7 @@ TEST(Utils, md5) {
TEST(Utils, sha256) {
char hash_str[65] = {0};
EXPECT_TRUE(sha256_hex(TEST_STRING, hash_str));
EXPECT_STREQ(
"d7a8fbb307d7809469ca9abcb0082e4f8d5651e46d3cdb762d02d0bf37c9e592",
hash_str);
EXPECT_STREQ("d7a8fbb307d7809469ca9abcb0082e4f8d5651e46d3cdb762d02d0bf37c9e592", hash_str);
}
TEST(Utils, sha1hmac) {
......@@ -64,9 +62,7 @@ TEST(Utils, sha1hmac) {
TEST(Utils, sha256hmac) {
char hash_str[65] = {0};
EXPECT_TRUE(sha256hmac_hex(TEST_STRING, hash_str, "key", 3));
EXPECT_STREQ(
"f7bc83f430538424b13298e6aa6fb143ef4d59a14946175997479dbc2d1a3cd8",
hash_str);
EXPECT_STREQ("f7bc83f430538424b13298e6aa6fb143ef4d59a14946175997479dbc2d1a3cd8", hash_str);
}
TEST(Utils, Config) {
......