Expand all tabs

Previously we used tabs for indentation in the Java and C sources,
while the Python sources were indented with 4 spaces, which was
sometimes confusing.

This patch expands all tabs to 4 spaces so that 4-space indentation
is used consistently throughout the tree.

Bonus:
This patch also removes trailing whitespace.

Change-Id: I1b9e7688d606c0da1bc015b1a4d396d19f4dc18a
Takashi Kajinami 2016-09-06 09:48:11 +09:00
parent b59319ab72
commit 2fdd244dce
71 changed files with 2586 additions and 2608 deletions
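For reference, the conversion itself is mechanical. A minimal Python sketch that reproduces it is shown below; this is illustrative only, the patch was not necessarily generated this way, and the glob patterns are assumptions rather than this commit's actual file list.

    # Illustrative sketch: expand tabs to 4 spaces and strip trailing whitespace.
    # The glob patterns are assumptions; the real file list is in the diff below.
    import pathlib

    for pattern in ("**/*.c", "**/*.h", "**/*.java", "**/*.xml"):
        for path in pathlib.Path(".").glob(pattern):
            lines = path.read_text().splitlines()
            cleaned = [line.expandtabs(4).rstrip() for line in lines]
            path.write_text("\n".join(cleaned) + "\n")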

View File

@ -29,18 +29,18 @@
#include "sbus.h"
static int g_JavaAccessorsInitialized = 0;
static jclass g_ClassRawMessage = NULL;
static jmethodID g_RawMessageCTOR = NULL;
static jfieldID g_FieldFDs = NULL;
static jfieldID g_FieldMetadata = NULL;
static jfieldID g_FieldParams = NULL;
static jclass g_ClassFileDescriptor = NULL;
static jmethodID g_FDCTOR = NULL;
static jfieldID g_FieldRawFD = NULL;
/*----------------------------------------------------------------------------
* init_java_accessors
@ -49,39 +49,39 @@ static jfieldID g_FieldRawFD = NULL;
* */
int init_java_accessors( JNIEnv* env )
{
// TODO: Fix to read only once.
// Currently seem to fail
//if( 1 == g_JavaAccessorsInitialized )
// return 0;
/*------------------------------------------------------------------------
* Reflecting SBusRawMessage
* */
g_ClassRawMessage =
(*env)->FindClass( env, "org/openstack/storlet/sbus/SBusRawMessage" );
if( NULL == g_ClassRawMessage )
return -1;
g_RawMessageCTOR =
(*env)->GetMethodID(env, g_ClassRawMessage, "<init>", "()V");
if( NULL == g_RawMessageCTOR )
return -1;
g_FieldFDs =
(*env)->GetFieldID( env, g_ClassRawMessage,
"hFiles_", "[Ljava/io/FileDescriptor;");
if( NULL == g_FieldFDs )
return -1;
g_FieldParams =
(*env)->GetFieldID( env, g_ClassRawMessage,
"strParams_", "Ljava/lang/String;");
if( NULL == g_FieldParams )
return -1;
g_FieldMetadata =
(*env)->GetFieldID( env, g_ClassRawMessage,
"strMetadata_", "Ljava/lang/String;");
if( NULL == g_FieldMetadata )
return -1;
@ -89,17 +89,17 @@ int init_java_accessors( JNIEnv* env )
* Reflecting java.io.FileDescriptor
* */
g_ClassFileDescriptor =
(*env)->FindClass(env, "java/io/FileDescriptor");
if( NULL == g_ClassFileDescriptor )
return -1;
g_FDCTOR =
(*env)->GetMethodID(env, g_ClassFileDescriptor, "<init>", "()V");
if( NULL == g_FDCTOR )
return -1;
g_FieldRawFD =
(*env)->GetFieldID(env, g_ClassFileDescriptor, "fd", "I");
if( NULL == g_FieldRawFD )
return -1;
@ -114,133 +114,133 @@ int init_java_accessors( JNIEnv* env )
*
* */
JNIEXPORT void JNICALL
Java_org_openstack_storlet_sbus_SBusJNI_startLogger( JNIEnv* env,
jobject obj,
jstring jLevel,
jstring jContId )
{
const char* pLogLevel = (*env)->GetStringUTFChars( env, jLevel, NULL );
if( NULL == pLogLevel )
return;
const char* pContId = (*env)->GetStringUTFChars( env, jContId, NULL );
if( NULL == pContId )
return;
sbus_start_logger( pLogLevel, pContId);
(*env)->ReleaseStringUTFChars( env, jLevel, pLogLevel );
}
/*----------------------------------------------------------------------------
*
* */
JNIEXPORT void JNICALL
Java_org_openstack_storlet_sbus_SBusJNI_stopLogger( JNIEnv* env,
jobject obj )
{
sbus_stop_logger();
}
/*----------------------------------------------------------------------------
*
* */
JNIEXPORT jint JNICALL
Java_org_openstack_storlet_sbus_SBusJNI_createSBus( JNIEnv* env,
jobject obj,
jstring jstrPath )
{
int nBus = -1;
const char* pPath = (*env)->GetStringUTFChars( env, jstrPath, NULL );
if( NULL == pPath )
return -1;
nBus = sbus_create( pPath);
(*env)->ReleaseStringUTFChars( env, jstrPath, pPath );
return nBus;
}
/*----------------------------------------------------------------------------
*
* */
JNIEXPORT jint JNICALL
Java_org_openstack_storlet_sbus_SBusJNI_listenSBus( JNIEnv* env,
jobject obj,
jint jnBus )
{
return sbus_listen( jnBus );
}
/*----------------------------------------------------------------------------
*
* */
JNIEXPORT jint JNICALL
Java_org_openstack_storlet_sbus_SBusJNI_sendRawMessage( JNIEnv* env,
jobject obj,
jstring jstrPath,
jobject jMsg )
{
syslog( LOG_DEBUG, "Inside sendRawMessage" );
if( init_java_accessors( env ) )
return -1;
int i,j;
int nStatus = 0;
const char* strSBusPath = 0;
const char* strMetadata = 0;
int nMetadataLen = 0;
const char* strParams = 0;
int nParamsLen = 0;
int* pFiles = 0;
int nFiles = 0;
strSBusPath = (*env)->GetStringUTFChars( env, jstrPath, NULL );
if( NULL == strSBusPath )
return -1;
jobjectArray jFileDscrArr =
(jobjectArray)(*env)->GetObjectField(env, jMsg, g_FieldFDs );
if( NULL != jFileDscrArr )
{
nFiles = (*env)->GetArrayLength(env, jFileDscrArr );
pFiles = (int*) malloc( nFiles * sizeof(int) );
for( i = 0; i < nFiles; ++i )
{
jobject jFileDscr =
(*env)->GetObjectArrayElement( env, jFileDscrArr, i );
pFiles[i] = (*env)->GetIntField( env, jFileDscr, g_FieldRawFD );
}
}
jstring jstrMetadata =
(jstring)(*env)->GetObjectField(env, jMsg, g_FieldMetadata );
if( NULL != jstrMetadata )
{
strMetadata = (*env)->GetStringUTFChars( env, jstrMetadata, NULL );
nMetadataLen = strlen( strMetadata );
}
jstring jstrParams =
(jstring)(*env)->GetObjectField(env, jMsg, g_FieldParams );
if( NULL != jstrParams )
{
strParams = (*env)->GetStringUTFChars( env, jstrParams, NULL );
nParamsLen = strlen( strParams );
}
nStatus = sbus_send_msg( strSBusPath,
pFiles, nFiles,
strMetadata, nMetadataLen,
strParams, nParamsLen );
if( NULL != jstrMetadata )
(*env)->ReleaseStringUTFChars( env, jstrMetadata, strMetadata );
if( NULL != jstrParams )
(*env)->ReleaseStringUTFChars( env, jstrParams, strParams );
if( NULL != jFileDscrArr )
free( pFiles );
return nStatus;
}
@ -248,90 +248,90 @@ Java_org_openstack_storlet_sbus_SBusJNI_sendRawMessage( JNIEnv* env,
*
* */
JNIEXPORT jobject JNICALL
Java_org_openstack_storlet_sbus_SBusJNI_receiveRawMessage( JNIEnv* env,
jobject obj,
jint jnBus )
{
syslog( LOG_DEBUG, "JNI: Inside receiveRawMessage" );
if( init_java_accessors( env ) )
return NULL;
int i,j;
int nStatus = 0;
jobject RawMsgObj = 0;
char* strMetadata = 0;
int nMetadataLen = 0;
char* strParams = 0;
int nParamsLen = 0;
int* pFiles = 0;
int nFiles = 0;
nStatus = sbus_recv_msg( jnBus,
&pFiles, &nFiles,
&strMetadata, &nMetadataLen,
&strParams, &nParamsLen );
syslog( LOG_DEBUG, "JNI: sbus_recv_msg = %d, "
"nFiles = %d, "
"nMetadataLen = %d, "
"nParamsLen = %d",
nStatus, nFiles, nMetadataLen, nParamsLen );
if( 0 <= nStatus )
{
strParams[nParamsLen] = '\0';
// Create result object
RawMsgObj = (*env)->NewObject( env,
g_ClassRawMessage,
g_RawMessageCTOR );
// Params is never empty. We have 'command' at least.
jstring jstrParams = (*env)->NewStringUTF( env, strParams );
(*env)->SetObjectField( env, RawMsgObj, g_FieldParams, jstrParams );
if( 0 < nFiles )
{
strMetadata[nMetadataLen] = '\0';
// Instantiate FileDescriptor array
jobjectArray jFileDscrArr =
(*env)->NewObjectArray( env, nFiles,
g_ClassFileDescriptor,
NULL );
jobject jFileDscr;
for( i = 0; i < nFiles; ++i )
{
jFileDscr = (*env)->NewObject( env,
g_ClassFileDescriptor,
g_FDCTOR );
(*env)->SetIntField( env,
jFileDscr,
g_FieldRawFD,
pFiles[i] );
(*env)->SetObjectArrayElement( env,
jFileDscrArr,
i,
jFileDscr );
}
// Assign obtained object
(*env)->SetObjectField(env,RawMsgObj, g_FieldFDs, jFileDscrArr );
jstring jstrMetadata = (*env)->NewStringUTF(env, strMetadata );
(*env)->SetObjectField( env,
RawMsgObj,
g_FieldMetadata,
jstrMetadata );
}
syslog(LOG_DEBUG, "receiveRawMessage: %d files", nFiles );
}
// Clean up
free( pFiles );
free( strMetadata );
free( strParams );
return RawMsgObj;
}
/*============================== END OF FILE ===============================*/

View File

@ -14,47 +14,46 @@
-->
<project>
<target name="clean">
<delete dir="bin" />
<delete file="org_openstack_storlet_sbus_SBusJNI.h"/>
</target>
<target name="java">
<mkdir dir="bin" />
<javac
srcdir="src"
destdir="bin"
classpath="../../dependencies/json_simple-1.1.jar"
includeantruntime="false"/>
</target>
<target name="h" depends="java">
<javah destdir="." force="yes" classpath="bin">
<class name="org.openstack.storlet.sbus.SBusJNI" />
</javah>
</target>
<target name="so" depends="h">
<exec dir="." executable="gcc">
<arg line="-shared -o bin/libjsbus.so -fPIC" />
<arg line="-I/usr/lib/jvm/java-8-openjdk-amd64/include/" />
<arg line="-I/usr/lib/jvm/java-8-openjdk-amd64/include/linux/" />
<arg line="-I../SBusTransportLayer" />
<arg line="SBusJNI.c ../SBusTransportLayer/sbus.c" />
</exec>
</target>
<target name="jar" depends="so">
<jar destfile="SBusJavaFacade.jar" basedir="bin">
<manifest>
<attribute name="Main-Class"
value="org.openstack.storlet.sbus.SBus" />
</manifest>
</jar>
<move file="SBusJavaFacade.jar" todir="bin/"/>
</target>
<target name="build" depends="clean, jar">
</target>
</project>

View File

@ -29,63 +29,63 @@ import org.openstack.storlet.sbus.SBusBackend.eLogLevel;
* The front end Java class for SBus functionality.
* */
public class SBus {
private SBusHandler hServerSideSBus_;
private SBusBackend SBusBack_;
/*------------------------------------------------------------------------
* CTOR
*
* Instantiate the SBusBackend object. Start logging
* */
public SBus(final String contId) throws IOException {
SBusBack_ = new SBusBackend();
SBusBack_.startLogger(eLogLevel.SBUS_LOG_DEBUG, contId);
}
/*------------------------------------------------------------------------
* create
*
* Initialize the server side SBus
* */
public void create(final String strPath) throws IOException {
hServerSideSBus_ = SBusBack_.createSBus(strPath);
}
/*------------------------------------------------------------------------
* listen
*
* Listen to the SBus. Suspend the executing thread
* */
public void listen() throws IOException {
SBusBack_.listenSBus(hServerSideSBus_);
}
/*------------------------------------------------------------------------
* receive
* */
public ServerSBusInDatagram receive() throws Exception {
SBusRawMessage Msg = SBusBack_.receiveRawMessage(hServerSideSBus_);
ServerSBusInDatagram Dtg = new ServerSBusInDatagram(Msg);
return Dtg;
}
/*------------------------------------------------------------------------
* send
* */
public void send(final String strSBusPath, final ServerSBusOutDatagram Dtg)
throws IOException {
SBusRawMessage Msg = Dtg.toRawMessage();
SBusBack_.sendRawMessage(strSBusPath, Msg);
}
/*------------------------------------------------------------------------
* DTOR
*
* Stop logging
* */
public void finalize() {
SBusBack_.stopLogger();
}
}
/* ============================== END OF FILE =============================== */

View File

@ -15,9 +15,9 @@
*/
/*============================================================================
DD-MMM-2014 eranr Initial implementation as sChannel.
Introducing wrapping structures.
30-Jun-2014 evgenyl Switching to SBus. Code refactoring.
Simplifying API. Extracting business logic.
===========================================================================*/
@ -28,98 +28,98 @@ import java.io.IOException;
/*----------------------------------------------------------------------------
* SBusBackend
*
* This class wraps and transfers calls to the JNI implementation
* */
public class SBusBackend {
/*------------------------------------------------------------------------
* JNI layer delegate, common to every instance of SBusBackend
* */
private static SBusJNI SBusJNIObj_ = new SBusJNI();
/*------------------------------------------------------------------------
* Enumerating logging levels
* The values are suitable to syslog constants
* */
public static enum eLogLevel {
SBUS_LOG_DEBUG, SBUS_LOG_INFO, SBUS_LOG_WARNING, SBUS_LOG_CRITICAL, SBUS_LOG_OFF
};
/*------------------------------------------------------------------------
* Initiate logging with the required detail level
* */
public void startLogger(eLogLevel eLogLevel, String contId) {
String strLogLevel = null;
switch (eLogLevel) {
case SBUS_LOG_DEBUG:
strLogLevel = "DEBUG";
break;
case SBUS_LOG_INFO:
strLogLevel = "INFO";
break;
case SBUS_LOG_WARNING:
strLogLevel = "WARNING";
break;
case SBUS_LOG_CRITICAL:
strLogLevel = "CRITICAL";
break;
case SBUS_LOG_OFF:
strLogLevel = "OFF";
break;
default:
strLogLevel = "WARNINIG";
break;
}
SBusJNIObj_.startLogger(strLogLevel, contId);
}
/*------------------------------------------------------------------------
* Stop logging
* */
public void stopLogger() {
SBusJNIObj_.stopLogger();
}
/*------------------------------------------------------------------------
* Create the bus.
* */
public SBusHandler createSBus(final String strSBusName) throws IOException {
int nSBus = SBusJNIObj_.createSBus(strSBusName);
if (0 > nSBus)
throw new IOException("Unable to create SBus - " + strSBusName);
return new SBusHandler(nSBus);
}
/*------------------------------------------------------------------------
* Wait and listen to the bus.
* The executing thread is suspended until some data arrives.
* */
public boolean listenSBus(final SBusHandler hSBus) throws IOException {
int nStatus = SBusJNIObj_.listenSBus(hSBus.getFD());
if (0 > nStatus)
throw new IOException("Unable to listen to SBus");
return true;
}
/*------------------------------------------------------------------------
* Take the message and send it.
* */
public int sendRawMessage(final String strBusName, final SBusRawMessage Msg)
throws IOException {
int nStatus = SBusJNIObj_.sendRawMessage(strBusName, Msg);
if (0 > nStatus)
throw new IOException("Unable to send message");
return nStatus;
}
/*------------------------------------------------------------------------
* Read some actual raw data from the bus
* */
public SBusRawMessage receiveRawMessage(final SBusHandler hSBus)
throws IOException {
SBusRawMessage Msg = SBusJNIObj_.receiveRawMessage(hSBus.getFD());
if (null == Msg)
throw new IOException("Unable to retrieve a message");
return Msg;
}
}

View File

@ -15,43 +15,43 @@
*/
/*============================================================================
DD-MMM-2014 eranr Initial implementation as sChannel.
Introducing wrapping structures.
30-Jun-2014 evgenyl Switching to SBus. Code refactoring.
Simplifying API. Extracting business logic.
===========================================================================*/
package org.openstack.storlet.sbus;
/*----------------------------------------------------------------------------
* This class encapsulates OS level file descriptor used
* in Transport Layer APIs.
* */
public class SBusHandler {
private int nFD_;
/*------------------------------------------------------------------------
* CTOR
* No default value
* */
public SBusHandler(int nFD) {
nFD_ = nFD;
}
/*------------------------------------------------------------------------
* Getter
* */
public int getFD() {
return nFD_;
}
/*------------------------------------------------------------------------
* Validity
* */
public boolean isValid() {
return (0 <= getFD());
}
}
/* ============================== END OF FILE =============================== */

View File

@ -15,9 +15,9 @@
*/
/*============================================================================
DD-MMM-2014 eranr Initial implementation as sChannel.
Introducing wrapping structures.
30-Jun-2014 evgenyl Switching to SBus. Code refactoring.
Simplifying API. Extracting business logic.
===========================================================================*/
@ -30,21 +30,21 @@ package org.openstack.storlet.sbus;
* See SBusJNI.c for the implementation
* */
public class SBusJNI {
static {
System.loadLibrary("jsbus");
}
public native void startLogger(final String strLogLevel, final String contId);
public native void stopLogger();
public native int createSBus(final String strBusName);
public native int listenSBus(int nBus);
public native int sendRawMessage(final String strBusName,
final SBusRawMessage Msg);
public native SBusRawMessage receiveRawMessage(int nBus);
}
/* ============================== END OF FILE =============================== */

View File

@ -28,58 +28,58 @@ import java.io.FileDescriptor;
* SBusRawMessage
*
* This class aggregates the data which is sent through SBus.
* No logic is implemented here.
* */
public class SBusRawMessage {
/*------------------------------------------------------------------------
* Data Fields
* */
// Array of open file descriptors (FDs)
private FileDescriptor[] hFiles_;
// JSON-encoded string describing the FDs
private String strMetadata_;
// JSON-encoded string with additional information
// for storlet execution
private String strParams_;
/*------------------------------------------------------------------------
* Default CTOR
* */
public SBusRawMessage() {
hFiles_ = null;
strMetadata_ = null;
strParams_ = null;
}
/*------------------------------------------------------------------------
* Setters/getters
* */
public FileDescriptor[] getFiles() {
return hFiles_;
}
public void setFiles(FileDescriptor[] hFiles) {
this.hFiles_ = hFiles;
}
public String getMetadata() {
return strMetadata_;
}
public void setMetadata(String strMetadata) {
this.strMetadata_ = strMetadata;
}
public String getParams() {
return strParams_;
}
public void setParams(String strParams) {
this.strParams_ = strParams;
}
}
/* ============================== END OF FILE =============================== */

View File

@ -43,100 +43,100 @@ import org.json.simple.JSONArray;
public class ServerSBusInDatagram {
private int numFDs;
private FileDescriptor[] fds;
private String command;
private HashMap<String, String> params;
private HashMap<String, HashMap<String, String>>[] metadata;
private String taskID;
private void populateMetadata(HashMap<String, String> dest, JSONObject source) throws ParseException {
for (Object key : source.keySet()) {
String strKey = (String)key;
String strVal = (String)source.get(key);
dest.put(strKey, strVal);
}
}
/**
* Parses a raw message coming from the wire.
* The incoming message is constructed by the ClientSBusOutDatagram.
* The message is structured as follows:
* Array of file descriptors, already parsed in SBusRawMessage
* A command related json string of the following structure:
* {
* "command": "command encoded as string",
* "params": { // This element is optional
* "key1": "value1",
* ...
* },
* "task_id": "task id encoded as string" // This element is optional
* }
* File descriptors metadata, encoded as a JSON array with one
* element per file descriptor. The i'th element in the array
* consists of the metadata of the i'th element in the file
* descriptors array:
* [
* {
* "storlets": {
* "type": "the fd type encoded as string", // Mandatory
* ... // Additional optional storlets metadata
* },
* "storage": {
* "metadata key1": "metadata value 1",
* ...
* }
* },
* ...
* ]
* All the values in the above JSON elemens are strings.
* Once constructed the class provides all necessary accessors to the parsed
* fields.
* @param msg the raw mwssage consisting of the string encoded json formats
* @see SBusPythonFacade.ClientSBusOutDatagram the python code that serilializes the datagram
* @see SBusPythonFacade.ServerSBusInDatagram the equivalent python code
*/
public ServerSBusInDatagram(final SBusRawMessage msg) throws ParseException {
this.fds = msg.getFiles();
numFDs = this.fds == null ? 0 : this.fds.length;
JSONObject jsonCmdParams = (JSONObject)(new JSONParser().parse(msg.getParams()));
this.command = (String)jsonCmdParams.get("command");
this.params = new HashMap<String, String>();
if (jsonCmdParams.containsKey("params")) {
JSONObject jsonParams = (JSONObject)jsonCmdParams.get("params");
for (Object key : jsonParams.keySet()) {
this.params.put((String)key, (String)jsonParams.get(key));
}
}
if (jsonCmdParams.containsKey("task_id")) {
this.taskID = (String)jsonCmdParams.get("task_id");
}
String strMD = msg.getMetadata();
this.metadata = (HashMap<String, HashMap<String, String>>[])new HashMap[getNFiles()];
JSONArray jsonarray = (JSONArray)(new JSONParser().parse(strMD));
Iterator it = jsonarray.iterator();
int i=0;
while (it.hasNext()) {
this.metadata[i] = new HashMap<String, HashMap<String, String>>();
HashMap<String, String> storletsMetadata = new HashMap<String, String>();
HashMap<String, String> storageMetadata = new HashMap<String, String>();
JSONObject jsonobject = (JSONObject)it.next();
if (jsonobject.containsKey("storage")) {
populateMetadata(storageMetadata, (JSONObject)jsonobject.get("storage"));
}
if (!jsonobject.containsKey("storlets")) {
} else {
populateMetadata(storletsMetadata, (JSONObject)jsonobject.get("storlets"));
}
this.metadata[i].put("storage", storageMetadata);
this.metadata[i].put("storlets", storletsMetadata);
i++;
}
}
public FileDescriptor[] getFiles() {
return fds;
}
@ -152,11 +152,11 @@ public class ServerSBusInDatagram {
return params;
}
public String getTaskId() {
return taskID;
}
public HashMap<String, HashMap<String, String>>[] getFilesMetadata() {
return metadata;
}
}
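For reference, the javadoc above spells out the JSON layout that ServerSBusInDatagram parses. A minimal params/metadata pair matching that layout could look as follows; Python is used purely for illustration here, and every value is hypothetical.

    # Hypothetical example of the two JSON strings described in the javadoc above;
    # the command name, keys, and values are made up for illustration.
    import json

    params = json.dumps({
        "command": "EXECUTE",          # "command encoded as string"
        "params": {"key1": "value1"},  # optional
        "task_id": "1234",             # optional
    })

    # One array element per file descriptor; "type" under "storlets" is mandatory.
    metadata = json.dumps([
        {
            "storlets": {"type": "fd type encoded as string"},
            "storage": {"metadata key1": "metadata value 1"},
        }
    ])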

View File

@ -38,10 +38,10 @@ package org.openstack.storlet.sbus;
*/
public class ServerSBusOutDatagram {
public ServerSBusOutDatagram() {
}
public SBusRawMessage toRawMessage() {
return new SBusRawMessage();
}
}

View File

@ -15,17 +15,17 @@
<project>
<target name="clean">
<delete dir="build" />
<delete dir="dist" />
<delete dir="SBusPythonFacade.egg-info" />
</target>
<target name="build">
<exec executable="python" dir="." failonerror="true">
<arg value="setup.py"/>
<arg value="bdist"/>
</exec>
</target>
</project>

View File

@ -14,17 +14,17 @@
-->
<project>
<target name="so">
<mkdir dir="bin" />
<exec dir="." executable="gcc">
<arg line="-shared -o bin/sbus.so sbus.c -O2 -fPIC" />
</exec>
</target>
<target name="clean">
<delete dir="bin" />
</target>
<target name="build" depends="so">
</target>
</project>

View File

@ -123,14 +123,14 @@ int sbus_create( const char* str_sbus_path )
close(n_sbus_handle);
}
char mode[] = "0777";
n_status = chmod( str_sbus_path, strtol(mode, 0, 8) );
if( 0 != n_status )
{
syslog( LOG_ERR,
"sbus_create: Failed to set socket permissions. %s",
strerror(errno) );
close(n_sbus_handle);
}
int nReuse = 1;
@ -226,12 +226,12 @@ int dump_data_to_bytestream( char** pp_bytestream,
int n_offset = 0;
memcpy( *pp_bytestream + n_offset, (void*) &n_files, int_size );
n_offset += int_size;
memcpy( *pp_bytestream + n_offset, (void*) &n_files_metadata_len,
int_size );
n_offset += int_size;
memcpy( *pp_bytestream + n_offset, (void*) &n_msg_len, int_size );
n_offset += int_size;
memcpy( *pp_bytestream + n_offset, (void*) str_files_metadata,
n_files_metadata_len );
n_offset += n_files_metadata_len;
memcpy( *pp_bytestream + n_offset, (void*) str_msg_data, n_msg_len );
@ -327,7 +327,7 @@ int sbus_send_msg( const char* str_sbus_path,
n_files_metadata_len,
str_msg_data,
n_msg_len );
if( 0 > n_status ) {
close( n_sock );
} else {
@ -375,7 +375,7 @@ int sbus_extract_integer( const char* p_str )
* Caller shall free the allocated chunk.
*/
static
char* sbus_copy_substr( const char* p_src,
int n_len )
{
char* p_dst = (char*) malloc( n_len + 1 );
@ -390,7 +390,7 @@ char* sbus_copy_substr( const char* p_src,
* Caller shall free the allocated chunk.
*/
static
int sbus_extract_files( struct msghdr* p_msg,
int n_files,
int** pp_files )
{
@ -466,8 +466,8 @@ int sbus_recv_msg( int n_sbus_handler,
int n_msg_len = recvmsg( n_sbus_handler, &recv_msg, 0 );
if( n_msg_len < 0 ) {
syslog(LOG_ERR, "sbus_recv_msg: recvmsg failed. %s", strerror(errno));
close(n_sbus_handler);
n_status = -1;
}
if( 0 <= n_status ) {

View File

@ -15,42 +15,42 @@
<project>
<target name="clean">
<delete dir="bin" />
</target>
<target name="java">
<mkdir dir="bin" />
<javac srcdir="src" destdir="bin" includeantruntime="false">
<classpath>
<pathelement path="../SBus/SBusJavaFacade/bin/SBusJavaFacade.jar"/>
<pathelement path="../dependencies/json_simple-1.1.jar"/>
<pathelement path="../dependencies/slf4j-api-1.7.7.jar"/>
<pathelement path="../dependencies/logback-classic-1.1.2.jar"/>
<pathelement path="../dependencies/logback-core-1.1.2.jar"/>
<pathelement path="."/>
</classpath>
</javac>
</target>
<target name="jar" depends="java">
<jar destfile="SCommon.jar" basedir="bin">
<manifest>
<attribute name="Main-Class" value="org.openstack.storlet.daemon.IStorlet" />
</manifest>
</jar>
<move file="SCommon.jar" todir="bin/" />
</target>
<target name="test">
<javac srcdir="test" destdir="bin" includeantruntime="false">
<classpath>
<pathelement path="."/>
</classpath>
</javac>
</target>
<target name="build" depends="clean, jar, test">
</target>
</project>

View File

@ -23,8 +23,8 @@ import java.util.ArrayList;
import java.util.Map;
public interface IStorlet {
public void invoke(ArrayList<StorletInputStream> inStreams,
ArrayList<StorletOutputStream> outStreams,
Map<String, String> parameters, StorletLogger logger)
throws StorletException;
}

View File

@ -20,19 +20,19 @@
package org.openstack.storlet.common;
public class ObjectRequestEntry {
private StorletObjectOutputStream objectStream = null;
public synchronized StorletObjectOutputStream get()
throws InterruptedException {
if (objectStream == null)
wait();
return objectStream;
}
public synchronized void put(StorletObjectOutputStream objectStream)
throws InterruptedException {
this.objectStream = objectStream;
notify();
}
}

View File

@ -22,27 +22,27 @@ package org.openstack.storlet.common;
import java.util.HashMap;
public class ObjectRequestsTable {
private HashMap<String, ObjectRequestEntry> requestsTable;
public ObjectRequestsTable() {
requestsTable = new HashMap<String, ObjectRequestEntry>();
}
public ObjectRequestEntry Insert(String key) {
ObjectRequestEntry requestEntry = new ObjectRequestEntry();
synchronized (requestsTable) {
requestsTable.put(key, requestEntry);
}
return requestEntry;
}
public ObjectRequestEntry Get(String key) {
return requestsTable.get(key);
}
public void Remove(String key) {
synchronized (requestsTable) {
requestsTable.remove(key);
}
}
}

View File

@ -27,60 +27,60 @@ import java.util.Date;
import org.json.simple.JSONObject;
public class StorletContainerHandle extends StorletOutputStream {
private String containerName;
private ObjectRequestsTable requestTable;
public StorletContainerHandle(FileDescriptor request_fd,
HashMap<String, String> request_md, ObjectRequestsTable requestTable)
throws StorletException {
super(request_fd, request_md);
this.containerName = request_md.get("storlet_container_name");
if (this.containerName == null)
throw new StorletException(
"StorletContainerHandle init with no container name");
this.requestTable = requestTable;
}
public String getName() {
return containerName;
}
@SuppressWarnings("unchecked")
public StorletObjectOutputStream getObjectOutputStream(String objectName)
throws StorletException {
StorletObjectOutputStream objectStream = null;
String key = containerName + objectName + new Date().getTime();
JSONObject jRequestObj = new JSONObject();
jRequestObj.put("object_name", objectName);
jRequestObj.put("container_name", containerName);
jRequestObj.put("key", key);
ObjectRequestEntry requestEntry = requestTable.Insert(key);
try {
stream.write(jRequestObj.toString().getBytes());
} catch (IOException e) {
throw new StorletException(
"Failed to serialize object descriptor request "
+ e.toString());
}
try {
objectStream = requestEntry.get();
} catch (InterruptedException e) {
throw new StorletException(
"Exception while waiting for request entry"
+ e.getMessage());
}
requestTable.Remove(key);
return objectStream;
}
public void close() {
try {
stream.close();
} catch (IOException e) {
}
}
}

View File

@ -20,12 +20,12 @@
package org.openstack.storlet.common;
public class StorletException extends Exception {
/**
*
*/
private static final long serialVersionUID = 1L;
public StorletException(String message) {
super(message);
}
}

View File

@ -24,30 +24,30 @@ import java.io.IOException;
import java.util.HashMap;
public class StorletInputStream {
private HashMap<String, String> metadata;
protected InputStream stream;
public StorletInputStream(FileDescriptor fd, HashMap<String, String> md) {
stream = ((InputStream) (new FileInputStream(fd)));
metadata = md;
}
protected StorletInputStream(HashMap<String, String> md) {
metadata = md;
}
public HashMap<String, String> getMetadata() {
return metadata;
}
public InputStream getStream() {
return stream;
}
public void close() {
try {
stream.close();
} catch (IOException e) {
}
}
}

View File

@ -24,34 +24,34 @@ import java.io.FileOutputStream;
import java.io.IOException;
public class StorletLogger {
private FileOutputStream stream;
public StorletLogger(FileDescriptor fd) {
stream = new FileOutputStream(fd);
}
public void emitLog(String message) {
message = message + "\n";
try {
stream.write(message.getBytes());
} catch (IOException e) {
}
}
public void Flush() {
try {
stream.flush();
} catch (IOException e) {
}
}
public void close() {
Flush();
try {
stream.close();
} catch (IOException e) {
}
}
}

View File

@ -31,45 +31,45 @@ import org.json.simple.JSONObject;
public class StorletObjectOutputStream extends StorletOutputStream {
private OutputStream MetadataStream_;
public StorletObjectOutputStream(FileDescriptor data_fd,
HashMap<String, String> data_md, FileDescriptor md_fd) {
super(data_fd, data_md);
MetadataStream_ = ((OutputStream) (new FileOutputStream(md_fd)));
}
public OutputStream getStream() {
return stream;
}
public OutputStream getMDStream() {
return MetadataStream_;
}
public void closeMD(){
try{
MetadataStream_.close();
} catch (IOException e) {
}
}
@SuppressWarnings("unchecked")
public void setMetadata(Map<String, String> md) throws StorletException {
JSONObject jobj = new JSONObject();
Iterator<Map.Entry<String, String>> it = md.entrySet().iterator();
while (it.hasNext()) {
Map.Entry<String, String> pairs = (Map.Entry<String, String>) it
.next();
jobj.put((String) pairs.getKey(), (String) pairs.getValue());
it.remove();
}
try {
MetadataStream_.write(jobj.toString().getBytes());
} catch (IOException e) {
throw new StorletException("Failed to set metadata " + e.toString());
} finally {
closeMD();
}
}
}

View File

@ -26,22 +26,22 @@ import java.io.OutputStream;
import java.util.HashMap;
public class StorletOutputStream {
private HashMap<String, String> metadata;
protected OutputStream stream;
public StorletOutputStream(FileDescriptor fd, HashMap<String, String> md) {
stream = ((OutputStream) (new FileOutputStream(fd)));
metadata = md;
}
public HashMap<String, String> getMetadata() {
return metadata;
}
public void close(){
try{
stream.close();
} catch (IOException e) {
}
}
}

View File

@ -20,20 +20,20 @@
package org.openstack.storlet.common;
public class StorletUtils {
public static final String getClassFolder(
@SuppressWarnings("rawtypes") Class o) {
String strResult = "";
String strJarPath = o.getProtectionDomain().getCodeSource()
.getLocation().getPath();
String strSep = java.io.File.separator;
String[] strSubfolders = strJarPath.split(strSep);
// The content of strSubfolders is something like:
// "/home" "swift" "SomeStorlet" "SomeStorlet-1.0.jar"
// The first token contains separator, the last shall be thrown.
strResult = strSubfolders[0];
int nOfSubF = strSubfolders.length - 1;
for (int i = 1; i < nOfSubF; ++i)
strResult = strResult + strSep + strSubfolders[i];
return strResult;
}
}

View File

@ -22,17 +22,17 @@
<mkdir dir="bin" />
<javac srcdir="src" destdir="bin" includeantruntime="false">
<classpath>
<pathelement
path="../SBus/SBusJavaFacade/bin/SBusJavaFacade.jar"/>
<pathelement
path="../SCommon/bin/SCommon.jar"/>
<pathelement
path="../dependencies/json_simple-1.1.jar"/>
<pathelement
path="../dependencies/slf4j-api-1.7.7.jar"/>
<pathelement
path="../dependencies/logback-classic-1.1.2.jar"/>
<pathelement
path="../dependencies/logback-core-1.1.2.jar"/>
<pathelement path="."/>
</classpath>
@ -40,11 +40,11 @@
</target>
<target name="jar" depends="java">
<jar destfile="SDaemon.jar"
basedir="bin">
<manifest>
<attribute
name="Main-Class"
value="org.openstack.storlet.daemon.SDaemon" />
</manifest>
</jar>

View File

@ -26,10 +26,10 @@ import org.slf4j.Logger;
* */
public class SAbstractTask {
protected Logger logger;
public SAbstractTask(Logger logger) {
this.logger = logger;
}
}

View File

@ -32,38 +32,38 @@ import org.slf4j.Logger;
* a timeout is encountered)
* */
public class SCancelTask extends SAbstractTask {
private OutputStream sOut_ = null;
private String taskId_ = null;
/*------------------------------------------------------------------------
* CTOR
* */
public SCancelTask(OutputStream sOut, Logger logger, String taskId) {
super(logger);
this.sOut_ = sOut;
this.taskId_ = taskId;
}
public String getTaskId() {
return taskId_;
}
public OutputStream getSOut() {
return sOut_;
}
/*------------------------------------------------------------------------
* run
* */
public boolean run() {
boolean bStatus = true;
try {
this.sOut_.write((new String("OK")).getBytes());
} catch (IOException e) {
e.printStackTrace();
bStatus = false;
}
return bStatus;
}
}
/* ============================== END OF FILE =============================== */

View File

@ -35,246 +35,246 @@ import java.util.concurrent.*;
/*----------------------------------------------------------------------------
* SDaemon
*
 * This class acts as a language binding and management layer for
 * the user's Storlet logic implementation(s)
* */
public class SDaemon {
    private static Logger logger_;
    private static SBus sbus_;
    private static STaskFactory storletTaskFactory_;
    private static ExecutorService threadPool_;
    private static String strStorletName_;
    private static HashMap<String, Future> taskIdToTask_;
    private static int nDefaultTimeoutToWaitBeforeShutdown_ = 3;
    private static boolean initLog(final String strClassName,
            final String strLogLevel) {
        Level newLevel = Level.toLevel(strLogLevel);
        boolean bStatus = true;
        try {
            logger_ = (ch.qos.logback.classic.Logger) LoggerFactory
                    .getLogger("StorletDaemon_" + strClassName);
            logger_.setLevel(newLevel);
            logger_.info("Logger Started");
        } catch (Exception e) {
            System.err.println("got exception " + e);
            bStatus = false;
        }
        return bStatus;
    }
    private static IStorlet loadStorlet(final String strStorletClassName) {
        IStorlet storlet = null;
        try {
            Class<?> c = Class.forName(strStorletClassName);
            storlet = (IStorlet) c.newInstance();
        } catch (Exception e) {
            logger_.error(strStorletName_ + ": Failed to load storlet class "
                    + strStorletClassName + "; class path is "
                    + System.getProperty("java.class.path"));
            logger_.error(strStorletName_ + ": " + e.getStackTrace().toString());
        }
        return storlet;
    }
    /*------------------------------------------------------------------------
     * main
     *
     * Entry point.
     * args[0] - storlet class name
     * args[1] - path to SBus
     * args[2] - log level
     * args[3] - thread pool size
     *
     * Invocation from CLI example:
     * java -Djava.library.path=. ...
     *
     * when packed in a .jar with the native .so use:
     * java
     *     -Djava.library.path=.
     *     -Djava.class.path=.:./storletdaemon.jar
     *     org.openstack.storlet.daemon.StorletDaemon
     *     <args>
     *
     * where <args> can be: storlet.test.TestStorlet /tmp/aaa FINE 5
     *
     * */
    public static void main(String[] args) throws Exception {
        initialize(args);
        mainLoop();
        exit();
    }
    /*------------------------------------------------------------------------
     * initialize
     *
     * Initialize the resources
     * */
    private static void initialize(String[] args) throws Exception {
        strStorletName_ = args[0];
        String strSBusPath = args[1];
        String strLogLevel = args[2];
        int nPoolSize = Integer.parseInt(args[3]);
        String strContId = args[4];

        if (initLog(strStorletName_, strLogLevel) == false)
            return;

        IStorlet storlet = loadStorlet(strStorletName_);
        if (storlet == null)
            return;

        storletTaskFactory_ = new STaskFactory(storlet, logger_);
        logger_.trace("Instantiating SBus");
        sbus_ = new SBus(strContId);
        try {
            logger_.trace("Initialising SBus");
            sbus_.create(strSBusPath);
        } catch (IOException e) {
            logger_.error(strStorletName_ + ": Failed to create SBus");
            return;
        }
        logger_.trace("Initialising thread pool with " + nPoolSize + " threads");
        threadPool_ = Executors.newFixedThreadPool(nPoolSize);
        taskIdToTask_ = new HashMap<String, Future>();
    }
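Note (illustrative sketch, not part of this change): initialize() consumes five positional arguments, one more than the four listed in the comment above main(), the extra one being the container id. The snippet below simply packs placeholder values in that order and hands them to SDaemon.main(); it would only get past SBus creation on a host where the native SBus library and the given socket path actually exist.

// Hypothetical launcher: the argument order mirrors initialize() above.
public class SDaemonLaunchSketch {
    public static void main(String[] args) throws Exception {
        String[] daemonArgs = {
            "storlet.test.TestStorlet",  // args[0] - storlet class name
            "/tmp/aaa",                  // args[1] - path to SBus
            "TRACE",                     // args[2] - log level
            "5",                         // args[3] - thread pool size
            "0123456789ab"               // args[4] - container id (placeholder)
        };
        org.openstack.storlet.daemon.SDaemon.main(daemonArgs);
    }
}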
    /*------------------------------------------------------------------------
     * mainLoop
     *
     * The main loop - listen, receive, execute till the HALT command.
     * */
    private static void mainLoop() throws Exception {
        boolean doContinue = true;
        while (doContinue) {
            // Wait for incoming commands
            try {
                logger_.trace(strStorletName_ + ": listening on SBus");
                sbus_.listen();
                logger_.trace(strStorletName_ + ": SBus listen() returned");
            } catch (IOException e) {
                logger_.error(strStorletName_ + ": Failed to listen on SBus");
                doContinue = false;
                break;
            }

            logger_.trace(strStorletName_ + ": Calling receive");
            ServerSBusInDatagram dtg = null;
            try {
                dtg = sbus_.receive();
                logger_.trace(strStorletName_ + ": Receive returned");
            } catch (Exception e) {
                logger_.error(strStorletName_
                        + ": Failed to receive data on SBus", e);
                doContinue = false;
                break;
            }

            // We have the request
            // Initialize a task according to command and execute it
            doContinue = processDatagram(dtg);
        }
    }
    /*------------------------------------------------------------------------
     * processDatagram
     *
     * Analyze the request datagram. Invoke the relevant storlet
     * or do some other job ( halt, description, or maybe something
     * else in the future ).
     * */
    private static boolean processDatagram(ServerSBusInDatagram dtg) {
        boolean bStatus = true;
        SAbstractTask sTask = null;
        try {
            logger_.trace(strStorletName_ + ": Calling createStorletTask with "
                    + dtg.toString());
            sTask = storletTaskFactory_.createStorletTask(dtg);
        } catch (StorletException e) {
            logger_.trace(strStorletName_ + ": Failed to init task "
                    + e.toString());
            bStatus = false;
        }

        if (null == sTask) {
            logger_.error(strStorletName_
                    + ": Unknown command received. Quitting");
            bStatus = false;
        } else if (sTask instanceof SHaltTask) {
            logger_.trace(strStorletName_ + ": Got Halt Command");
            bStatus = false;
        } else if (sTask instanceof SExecutionTask) {
            logger_.trace(strStorletName_ + ": Got Invoke command");
            Future futureTask = threadPool_.submit((SExecutionTask) sTask);
            // The task id is derived from the Future's default toString(),
            // which ends with the object's identity hash code.
            String taskId = futureTask.toString().split("@")[1];

            ((SExecutionTask) sTask).setTaskIdToTask(taskIdToTask_);
            ((SExecutionTask) sTask).setTaskId(taskId);

            logger_.trace(strStorletName_ + ": task id is " + taskId);

            synchronized (taskIdToTask_) {
                taskIdToTask_.put(taskId, futureTask);
            }
            OutputStream taskIdOut = ((SExecutionTask) sTask).getTaskIdOut();
            try {
                taskIdOut.write(taskId.getBytes());
            } catch (IOException e) {
                logger_.trace(strStorletName_ + ": problem returning taskId "
                        + taskId + ": " + e.toString());
                bStatus = false;
            } finally {
                try {
                    taskIdOut.close();
                } catch (IOException e) {
                }
            }
        } else if (sTask instanceof SDescriptorTask) {
            logger_.trace(strStorletName_ + ": Got Descriptor command");
            ((SDescriptorTask) sTask).run();
        } else if (sTask instanceof SPingTask) {
            logger_.trace(strStorletName_ + ": Got Ping command");
            bStatus = ((SPingTask) sTask).run();
        } else if (sTask instanceof SCancelTask) {
            String taskId = ((SCancelTask) sTask).getTaskId();
            logger_.trace(strStorletName_ + ": Got Cancel command for taskId "
                    + taskId);
            if (taskIdToTask_.get(taskId) == null) {
                bStatus = false;
                logger_.trace(strStorletName_ + ": COULD NOT FIND taskId "
                        + taskId);
                try {
                    ((SCancelTask) sTask).getSOut().write(
                            (new String("BAD")).getBytes());
                } catch (IOException e) {
                }
            } else {
                logger_.trace(strStorletName_ + ": good. found taskId "
                        + taskId);
                (taskIdToTask_.get(taskId)).cancel(true);
                taskIdToTask_.remove(taskId);
            }
            bStatus = ((SCancelTask) sTask).run();
        }
        return bStatus;
    }
    /*------------------------------------------------------------------------
     * exit
     *
     * Release the resources and quit
     * */
    private static void exit() {
        logger_.info(strStorletName_ + ": Daemon for storlet "
                + strStorletName_ + " is going down...shutting down threadpool");
        try {
            threadPool_.awaitTermination(nDefaultTimeoutToWaitBeforeShutdown_,
                    TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        threadPool_.shutdown();
        logger_.info(strStorletName_ + ": threadpool down");
    }
}

View File

@ -28,39 +28,39 @@ import org.openstack.storlet.common.*;
*
* */
public class SDescriptorTask extends SAbstractTask {
    private ObjectRequestsTable requestsTable_ = null;
    private StorletObjectOutputStream objStream_ = null;
    private String strKey_ = null;

    /*------------------------------------------------------------------------
     * CTOR
     * */
    public SDescriptorTask(StorletObjectOutputStream objStream,
            final String key, ObjectRequestsTable requestsTable, Logger logger) {
        super(logger);
        this.requestsTable_ = requestsTable;
        this.objStream_ = objStream;
        this.strKey_ = key;
    }

    /*------------------------------------------------------------------------
     * run
     * */
    public void run() {
        logger.trace("StorletDescriptorTask: " + "run going to extract key "
                + strKey_);
        ObjectRequestEntry entry = requestsTable_.Get(strKey_);
        logger.trace("StorletDescriptorTask: " + "run got entry "
                + entry.toString());
        try {
            logger.trace("StorletDescriptorTask: "
                    + "run putting the obj stream in the entry ");
            entry.put(objStream_);
            logger.trace("StorletDescriptorTask: "
                    + "run obj stream is in the table ");
        } catch (InterruptedException e) {
            logger.error("InterruptedException while putting obj stream");
        }
    }
}
/* ============================== END OF FILE =============================== */

View File

@ -31,92 +31,92 @@ import java.util.concurrent.Future;
/*----------------------------------------------------------------------------
* SExecutionTask
*
 * Thread pool worker. Wraps file I/O streams for further
 * utilization by the storlet
* */
public class SExecutionTask extends SAbstractTask implements Runnable {
    private StorletLogger storletLogger_ = null;
    private IStorlet storlet_ = null;
    private ArrayList<StorletInputStream> inStreams_ = null;
    private ArrayList<StorletOutputStream> outStreams_ = null;
    private HashMap<String, String> executionParams_ = null;
    private OutputStream taskIdOut_ = null;
    private String taskId_ = null;
    private HashMap<String, Future> taskIdToTask_ = null;

    public SExecutionTask(IStorlet storlet,
            ArrayList<StorletInputStream> instreams,
            ArrayList<StorletOutputStream> outstreams, OutputStream taskIdOut,
            HashMap<String, String> executionParams,
            StorletLogger storletLogger, Logger logger) {
        super(logger);
        this.storlet_ = storlet;
        this.inStreams_ = instreams;
        this.outStreams_ = outstreams;
        this.executionParams_ = executionParams;
        this.storletLogger_ = storletLogger;
        this.taskIdOut_ = taskIdOut;
    }

    public ArrayList<StorletInputStream> getInStreams() {
        return inStreams_;
    }

    public ArrayList<StorletOutputStream> getOutStreams() {
        return outStreams_;
    }

    public HashMap<String, String> getExecutionParams() {
        return executionParams_;
    }

    public OutputStream getTaskIdOut() {
        return taskIdOut_;
    }

    public void setTaskId(String taskId) {
        taskId_ = taskId;
    }

    public void setTaskIdToTask(HashMap<String, Future> taskIdToTask) {
        taskIdToTask_ = taskIdToTask;
    }

    private void closeStorletInputStreams() {
        for (StorletInputStream stream : inStreams_) {
            stream.close();
        }
    }

    private void closeStorletOutputStreams() {
        for (StorletOutputStream stream : outStreams_) {
            stream.close();
        }
    }

    private void closeStorletStreams() {
        closeStorletInputStreams();
        closeStorletOutputStreams();
    }

    @Override
    public void run() {
        try {
            storletLogger_.emitLog("About to invoke storlet");
            storlet_.invoke(inStreams_, outStreams_, executionParams_,
                    storletLogger_);
            storletLogger_.emitLog("Storlet invocation done");
            synchronized (taskIdToTask_) {
                taskIdToTask_.remove(taskId_);
            }
        } catch (StorletException e) {
            storletLogger_.emitLog(e.getMessage());
        } finally {
            storletLogger_.close();
            // We make sure all streams are closed
            closeStorletStreams();
        }
    }
}
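Note (illustrative sketch, not part of this change): SExecutionTask.run() above hands its four fields straight to IStorlet.invoke(). A minimal storlet satisfying that contract could look roughly like the following; the package name is invented, and the imports assume the org.openstack.storlet.common layout used by the sample storlets later in this change.

package org.openstack.storlet.example; // hypothetical package

import java.util.ArrayList;
import java.util.Map;

import org.openstack.storlet.common.IStorlet;
import org.openstack.storlet.common.StorletException;
import org.openstack.storlet.common.StorletInputStream;
import org.openstack.storlet.common.StorletLogger;
import org.openstack.storlet.common.StorletOutputStream;

public class NoopStorlet implements IStorlet {
    @Override
    public void invoke(ArrayList<StorletInputStream> inputStreams,
            ArrayList<StorletOutputStream> outputStreams,
            Map<String, String> parameters, StorletLogger log)
            throws StorletException {
        // Does nothing; a real storlet would read from inputStreams
        // and write to outputStreams here.
        log.emitLog("NoopStorlet invoked with " + parameters.size()
                + " parameters");
    }
}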

View File

@ -25,15 +25,15 @@ import org.slf4j.Logger;
/*----------------------------------------------------------------------------
* SHaltTask
*
 * Instantiate AbstractTask class. The primary usage intent is to stop
 * a relevant working thread.
* */
public class SHaltTask extends SAbstractTask {
    /*------------------------------------------------------------------------
     * CTOR
     * */
    public SHaltTask(Logger logger) {
        super(logger);
    }
}
/* ============================== END OF FILE =============================== */

View File

@ -28,34 +28,34 @@ import org.slf4j.Logger;
/*----------------------------------------------------------------------------
* SPingTask
*
 * Instantiate AbstractTask class. The primary usage intent is to respond
 * that this Storlet Daemon is running
* */
public class SPingTask extends SAbstractTask {
    private OutputStream sOut_ = null;

    /*------------------------------------------------------------------------
     * CTOR
     * */
    public SPingTask(OutputStream sOut, Logger logger) {
        super(logger);
        this.sOut_ = sOut;
    }

    /*------------------------------------------------------------------------
     * run
     *
     * The actual response on "ping" command.
     * */
    public boolean run() {
        boolean bStatus = true;
        try {
            this.sOut_.write((new String("OK")).getBytes());
        } catch (IOException e) {
            e.printStackTrace();
            bStatus = false;
        }
        return bStatus;
    }
}
/* ============================== END OF FILE =============================== */

View File

@ -37,238 +37,238 @@ import org.openstack.storlet.sbus.ServerSBusInDatagram;
* Prepare the storlet execution environment
* */
public class STaskFactory {
    private IStorlet storlet_;
    private Logger logger_;
    private ObjectRequestsTable requestsTable_;

    public STaskFactory(IStorlet storlet, Logger logger) {
        this.storlet_ = storlet;
        this.logger_ = logger;
        this.requestsTable_ = new ObjectRequestsTable();
    }

    public SAbstractTask createStorletTask(ServerSBusInDatagram dtg)
            throws StorletException {
        SAbstractTask ResObj = null;
        String command = dtg.getCommand();

        if (command.equals("SBUS_CMD_HALT")) {
            this.logger_.trace("createStorletTask: " + "received HALT command");
            ResObj = new SHaltTask(logger_);
        } else if (command.equals("SBUS_CMD_EXECUTE")) {
            this.logger_.trace("createStorletTask: "
                    + "received EXECUTE command");
            ResObj = createExecutionTask(dtg);
        } else if (command.equals("SBUS_CMD_DESCRIPTOR")) {
            this.logger_.trace("createStorletTask: "
                    + "received Descriptor command");
            ResObj = createDescriptorTask(dtg);
        } else if (command.equals("SBUS_CMD_PING")) {
            this.logger_.trace("createStorletTask: " + "received Ping command");
            ResObj = createPingTask(dtg);
        } else if (command.equals("SBUS_CMD_CANCEL")) {
            this.logger_.trace("createStorletTask: "
                    + "received Cancel command");
            ResObj = createCancelTask(dtg);
        } else {
            this.logger_.error("createStorletTask: " + command
                    + " is not supported");
        }
        return ResObj;
    }
    private SExecutionTask createExecutionTask(ServerSBusInDatagram dtg)
            throws StorletException {
        ArrayList<StorletInputStream> inStreams = new ArrayList<StorletInputStream>();
        ArrayList<StorletOutputStream> outStreams = new ArrayList<StorletOutputStream>();
        StorletLogger storletLogger = null;
        int nFiles = dtg.getNFiles();
        HashMap<String, HashMap<String, String>>[] FilesMD = dtg.getFilesMetadata();
        this.logger_.trace("StorletTask: Got " + nFiles + " fds");
        OutputStream taskIdOut = null;
        for (int i = 0; i < nFiles; ++i) {
            HashMap<String, String> storletsMetadata = FilesMD[i].get("storlets");
            HashMap<String, String> storageMetadata = FilesMD[i].get("storage");
            FileDescriptor fd = dtg.getFiles()[i];
            String strFDtype = storletsMetadata.get("type");
            if (strFDtype.equals("SBUS_FD_OUTPUT_TASK_ID")) {
                taskIdOut = new FileOutputStream(fd);
            } else if (strFDtype.equals("SBUS_FD_INPUT_OBJECT")) {
                this.logger_.trace("createStorletTask: fd " + i
                        + " is of type SBUS_FD_INPUT_OBJECT");
                String start = storletsMetadata.get("start");
                String end = storletsMetadata.get("end");
                if (start != null && end != null) {
                    RangeStorletInputStream rangeStream;
                    try {
                        rangeStream = new RangeStorletInputStream(
                                fd,
                                storageMetadata,
                                Long.parseLong(start),
                                Long.parseLong(end));
                    } catch (IOException e) {
                        this.logger_.error("Got start=" + start + " end=" + end);
                        this.logger_.error(e.toString(), e);
                        throw new StorletException(e.toString());
                    }
                    inStreams.add((StorletInputStream) rangeStream);
                } else {
                    inStreams.add(new StorletInputStream(fd, storageMetadata));
                }
            } else if (strFDtype.equals("SBUS_FD_OUTPUT_OBJECT")) {
                this.logger_.trace("createStorletTask: fd " + i
                        + " is of type SBUS_FD_OUTPUT_OBJECT");
                String strNextFDtype = dtg.getFilesMetadata()[i + 1]
                        .get("storlets").get("type");
                if (!strNextFDtype.equals("SBUS_FD_OUTPUT_OBJECT_METADATA")) {
                    this.logger_.error("StorletTask: fd " + (i + 1)
                            + " is not SBUS_FD_OUTPUT_OBJECT_METADATA "
                            + " as expected");
                } else {
                    this.logger_.trace("createStorletTask: fd " + (i + 1)
                            + " is of type SBUS_FD_OUTPUT_OBJECT_METADATA");
                }
                outStreams.add(new StorletObjectOutputStream(fd, storageMetadata,
                        dtg.getFiles()[i + 1]));
                ++i;
            } else if (strFDtype.equals("SBUS_FD_LOGGER")) {
                this.logger_.trace("createStorletTask: fd " + i
                        + " is of type SBUS_FD_LOGGER");
                storletLogger = new StorletLogger(fd);
            } else if (strFDtype.equals("SBUS_FD_OUTPUT_CONTAINER")) {
                this.logger_.trace("createStorletTask: fd " + i
                        + " is of type SBUS_FD_OUTPUT_CONTAINER");
                this.logger_.trace("createStorletTask: md is"
                        + storageMetadata.toString());
                outStreams.add(new StorletContainerHandle(fd,
                        storageMetadata, requestsTable_));
            } else
                this.logger_.error("createStorletTask: fd " + i
                        + " is of unknown type " + strFDtype);
        }
        return new SExecutionTask(storlet_, inStreams, outStreams, taskIdOut,
                dtg.getExecParams(), storletLogger, logger_);
    }
    private SDescriptorTask createDescriptorTask(ServerSBusInDatagram dtg) {
        SDescriptorTask ResObj = null;
        String strKey = "";
        boolean bStatus = true;

        if (2 != dtg.getNFiles()) {
            this.logger_.error("createDescriptorTask: "
                    + "Wrong fd count for descriptor command. "
                    + "Expected 2, got " + dtg.getNFiles());
            bStatus = false;
        }
        this.logger_.trace("createDescriptorTask: #FDs is good");

        if (bStatus) {
            strKey = dtg.getExecParams().get("key");
            if (null == strKey) {
                this.logger_.error("createDescriptorTask: "
                        + "No key in params");
                bStatus = false;
            }
            this.logger_.trace("createDescriptorTask: key is good");
        }

        if (bStatus) {
            // type is a metadata field used internally, and it should not
            // make it further to the Storlet invocation
            String strFDType = dtg.getFilesMetadata()[0].get("storlets").get("type");
            if (!strFDType.equals("SBUS_FD_OUTPUT_OBJECT")) {
                this.logger_.error("createDescriptorTask: "
                        + "Wrong fd type for descriptor command. "
                        + "Expected SBUS_FD_OUTPUT_OBJECT " + " got "
                        + strFDType);
                bStatus = false;
            }
            this.logger_.trace("createStorletTask: "
                    + "fd metadata is good. Creating object stream");
        }

        if (bStatus) {
            StorletObjectOutputStream objStream = new StorletObjectOutputStream(
                    dtg.getFiles()[0], dtg.getFilesMetadata()[0].get("storage"),
                    dtg.getFiles()[1]);
            // parse descriptor stuff
            this.logger_.trace("createStorletTask: "
                    + "Returning StorletDescriptorTask");
            ResObj = new SDescriptorTask(objStream, strKey, requestsTable_,
                    logger_);
        }
        return ResObj;
    }
    private SCancelTask createCancelTask(ServerSBusInDatagram dtg) {
        SCancelTask ResObj = null;
        String taskId = dtg.getTaskId();
        boolean bStatus = true;

        if (1 != dtg.getNFiles()) {
            this.logger_.error("createCancelTask: "
                    + "Wrong fd count for Cancel command. "
                    + "Expected 1, got " + dtg.getNFiles());
            bStatus = false;
        }
        this.logger_.trace("createCancelTask: #FDs is good");

        if (bStatus) {
            String strFDType = dtg.getFilesMetadata()[0].get("storlets").get("type");
            if (!strFDType.equals("SBUS_FD_SERVICE_OUT")) {
                this.logger_.error("createCancelTask: "
                        + "Wrong fd type for Cancel command. "
                        + "Expected SBUS_FD_SERVICE_OUT " + " got "
                        + strFDType);
                bStatus = false;
            }
            this.logger_.trace("createCancelTask: "
                    + "fd metadata is good. Creating stream");
        }

        if (bStatus) {
            OutputStream sOut = new FileOutputStream(dtg.getFiles()[0]);
            this.logger_.trace("createCancelTask: "
                    + "Returning StorletCancelTask");
            ResObj = new SCancelTask(sOut, logger_, taskId);
        }
        return ResObj;
    }
    private SPingTask createPingTask(ServerSBusInDatagram dtg) {
        SPingTask ResObj = null;
        boolean bStatus = true;

        if (1 != dtg.getNFiles()) {
            this.logger_.error("createPingTask: "
                    + "Wrong fd count for Ping command. "
                    + "Expected 1, got " + dtg.getNFiles());
            bStatus = false;
        }
        this.logger_.trace("createPingTask: #FDs is good");

        if (bStatus) {
            String strFDType = dtg.getFilesMetadata()[0].get("storlets").get("type");
            if (!strFDType.equals("SBUS_FD_SERVICE_OUT")) {
                this.logger_.error("createPingTask: "
                        + "Wrong fd type for Ping command. "
                        + "Expected SBUS_FD_SERVICE_OUT " + " got "
                        + strFDType);
                bStatus = false;
            }
            this.logger_.trace("createPingTask: "
                    + "fd metadata is good. Creating object stream");
        }

        if (bStatus) {
            OutputStream sOut = new FileOutputStream(dtg.getFiles()[0]);
            this.logger_
                    .trace("createPingTask: " + "Returning StorletPingTask");
            ResObj = new SPingTask(sOut, logger_);
        }
        return ResObj;
    }
}
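Note (illustrative sketch, not part of this change): the factory above keys everything off the per-file-descriptor metadata. The "storlets" map carries a "type" field (plus optional "start"/"end" for ranged reads), while the "storage" map is passed through to the stream objects. The shape of one input fd's metadata, as inferred only from the checks in createExecutionTask(), might be assembled like this; the real maps are built on the Swift side and arrive inside a ServerSBusInDatagram, and the "storage" key shown is a made-up placeholder.

import java.util.HashMap;

// Illustrative only: the per-fd metadata layout createExecutionTask() inspects.
public class FdMetadataSketch {
    public static HashMap<String, HashMap<String, String>> inputObjectFd() {
        HashMap<String, String> storlets = new HashMap<String, String>();
        storlets.put("type", "SBUS_FD_INPUT_OBJECT");
        // Optional: request a ranged read, handled via RangeStorletInputStream
        storlets.put("start", "0");
        storlets.put("end", "1023");

        HashMap<String, String> storage = new HashMap<String, String>();
        storage.put("Content-Type", "application/octet-stream"); // hypothetical key

        HashMap<String, HashMap<String, String>> md =
                new HashMap<String, HashMap<String, String>>();
        md.put("storlets", storlets);
        md.put("storage", storage);
        return md;
    }
}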

View File

@ -14,17 +14,17 @@
-->
<project>
<target name="container_restart">
<target name="container_restart">
<mkdir dir="bin" />
<exec dir="." executable="gcc">
<arg line="-o bin/restart_docker_container restart_docker_container.c" />
</exec>
</target>
<exec dir="." executable="gcc">
<arg line="-o bin/restart_docker_container restart_docker_container.c" />
</exec>
</target>
<target name="clean">
<target name="clean">
<delete dir="bin" />
</target>
</target>
<target name="build" depends="container_restart">
</target>
<target name="build" depends="container_restart">
</target>
</project>

View File

@ -31,40 +31,40 @@
*/
int main(int argc, char **argv) {
    char command[4096];
    char container_name[256];
    char container_image[256];
    char mount_dir1[1024];
    char mount_dir2[1024];

    if (argc != 5) {
        fprintf(stderr, "Usage: %s container_name container_image mount_dir1 mount_dir2\n",
                argv[0]);
        return 1;
    }

    snprintf(container_name, (size_t)256, "%s", argv[1]);
    snprintf(container_image, (size_t)256, "%s", argv[2]);
    snprintf(mount_dir1, (size_t)1024, "%s", argv[3]);
    snprintf(mount_dir2, (size_t)1024, "%s", argv[4]);

    int ret;
    setresuid(0,0,0);
    setresgid(0,0,0);
    sprintf(command, "/usr/bin/docker stop -t 1 %s", container_name);
    ret = system(command);

    sprintf(command, "/usr/bin/docker rm %s", container_name);
    ret = system(command);

    sprintf(command,
            "/usr/bin/docker run --net=none --name %s -d -v /dev/log:/dev/log -v %s -v %s -i -t %s",
            container_name,
            mount_dir1,
            mount_dir2,
            container_image);
    ret = system(command);
    if (ret)
        return(EXIT_FAILURE);
    return(EXIT_SUCCESS);
}

View File

@ -15,17 +15,17 @@
<project>
<target name="clean">
<delete dir="build" />
<delete dir="dist" />
<delete dir="storlet_daemon_factory.egg-info" />
</target>
<target name="build">
<exec executable="python" dir="." failonerror="true">
<arg value="setup.py"/>
<arg value="bdist"/>
</exec>
</target>
<target name="clean">
<delete dir="build" />
<delete dir="dist" />
<delete dir="storlet_daemon_factory.egg-info" />
</target>
</project>
<target name="build">
<exec executable="python" dir="." failonerror="true">
<arg value="setup.py" />
<arg value="bdist" />
</exec>
</target>
</project>

View File

@ -15,17 +15,17 @@
<project>
<target name="clean">
<delete dir="build" />
<delete dir="dist" />
<delete dir="storlets.egg-info" />
</target>
<target name="build">
<exec executable="python" dir="." failonerror="true">
<arg value="setup.py"/>
<arg value="bdist"/>
</exec>
</target>
<target name="clean">
<delete dir="build" />
<delete dir="dist" />
<delete dir="storlets.egg-info" />
</target>
</project>
<target name="build">
<exec executable="python" dir="." failonerror="true">
<arg value="setup.py" />
<arg value="bdist" />
</exec>
</target>
</project>

View File

@ -66,7 +66,7 @@ Code Organization
- SCommon/: A Java library required for storlets development
- SDaemon/: A generic Java daemon for loading storlets at runtime
- SMSCripts/: Run time scripts for doing Docker management commands
- agent/: Python code for Docker side storlets process management
- swift/: Python swift side code
@ -89,7 +89,7 @@ For Storlets Developers
-----------------------
Currently, storlets can be developed in Java only.
To get started, follow:
`"S2AIO - Swift Storlets All In One" <http://storlets.readthedocs.io/en/latest/getting_started.html>`__.
To write and deploy a storlet, follow:

View File

@ -15,33 +15,33 @@
<project>
<target name="clean">
<delete dir="bin" />
</target>
<target name="clean">
<delete dir="bin" />
</target>
<target name="java">
<mkdir dir="bin" />
<javac srcdir="src" destdir="bin"
classpath="../../Engine/SCommon/bin/SCommon.jar"
includeantruntime="false" />
</target>
<target name="java">
<mkdir dir="bin" />
<javac srcdir="src" destdir="bin"
classpath="../../Engine/SCommon/bin/SCommon.jar"
includeantruntime="false" />
</target>
<target name="text">
<exec dir="bin" executable="wget">
<arg line="https://github.com/openstack/storlets -O input.txt" />
</exec>
<exec dir="bin" executable="wget">
<arg line="https://github.com/openstack/storlets -O input.txt" />
</exec>
</target>
<target name="jar" depends="java">
<jar destfile="compressstorlet-1.0.jar" basedir="bin">
<manifest>
<attribute name="Main-Class"
value="org.openstack.storlet.IdentityStorlet" />
</manifest>
</jar>
<move file="compressstorlet-1.0.jar" todir="bin" />
</target>
<target name="jar" depends="java">
<jar destfile="compressstorlet-1.0.jar" basedir="bin">
<manifest>
<attribute name="Main-Class"
value="org.openstack.storlet.IdentityStorlet" />
</manifest>
</jar>
<move file="compressstorlet-1.0.jar" todir="bin" />
</target>
<target name="build" depends="clean, jar, text">
</target>
<target name="build" depends="clean, jar, text">
</target>
</project>

View File

@ -39,58 +39,58 @@ import org.openstack.storlet.common.StorletOutputStream;
import org.openstack.storlet.common.StorletUtils;
public class CompressStorlet implements IStorlet {
    @Override
    public void invoke(ArrayList<StorletInputStream> inputStreams,
            ArrayList<StorletOutputStream> outputStreams,
            Map<String, String> parameters, StorletLogger log)
            throws StorletException {
        log.emitLog("CompressStorlet Invoked");

        StorletInputStream sis = inputStreams.get(0);
        InputStream is = sis.getStream();
        HashMap<String, String> metadata = sis.getMetadata();

        final int COMPRESS = 0;
        final int UNCOMPRESS = 1;

        int action = COMPRESS;

        /*
         * Get optional action flag
         */
        String action_str = parameters.get("action");
        if (action_str != null && action_str.equals("uncompress")) {
            action = UNCOMPRESS;
        }

        StorletObjectOutputStream storletObjectOutputStream = (StorletObjectOutputStream) outputStreams.get(0);
        storletObjectOutputStream.setMetadata(metadata);
        OutputStream outputStream = storletObjectOutputStream.getStream();
        try {
            byte[] buffer = new byte[65536];
            int len;
            if (action == COMPRESS) {
                GZIPOutputStream gzipOS = new GZIPOutputStream(outputStream);
                while ((len = is.read(buffer)) != -1) {
                    gzipOS.write(buffer, 0, len);
                }
                gzipOS.close();
            } else {
                GZIPInputStream gzipIS = new GZIPInputStream(is);
                while ((len = gzipIS.read(buffer)) != -1) {
                    outputStream.write(buffer, 0, len);
                }
                gzipIS.close();
            }
        } catch (IOException e) {
            log.emitLog("CompressExample - raised IOException: " + e.getMessage());
        } finally {
            try {
                is.close();
                outputStream.close();
            } catch (IOException e) {
            }
        }
        log.emitLog("CompressStorlet Invocation done");
}
}
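Note (illustrative sketch, not part of this change): the compress/uncompress branch above is a plain streaming copy through java.util.zip, selected by the optional "action" parameter. The standalone round trip below exercises the same buffer loop against in-memory data, which can be handy when checking the logic outside of Swift; the class name is invented for illustration.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

// Standalone GZIP round trip using the same copy pattern as CompressStorlet.
public class GzipRoundTrip {
    public static void main(String[] args) throws IOException {
        byte[] original = "hello storlets".getBytes("UTF-8");

        // Compress: write the plain bytes through a GZIPOutputStream
        ByteArrayOutputStream compressed = new ByteArrayOutputStream();
        GZIPOutputStream gzipOS = new GZIPOutputStream(compressed);
        gzipOS.write(original, 0, original.length);
        gzipOS.close();

        // Uncompress: read back through a GZIPInputStream with a buffer loop
        GZIPInputStream gzipIS = new GZIPInputStream(
                new ByteArrayInputStream(compressed.toByteArray()));
        ByteArrayOutputStream restored = new ByteArrayOutputStream();
        byte[] buffer = new byte[65536];
        int len;
        while ((len = gzipIS.read(buffer)) != -1) {
            restored.write(buffer, 0, len);
        }
        gzipIS.close();

        System.out.println(new String(restored.toByteArray(), "UTF-8"));
    }
}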

View File

@ -14,43 +14,36 @@
-->
<project>
<target name="clean">
<delete dir="bin" />
</target>
<target name="java">
<mkdir dir="bin" />
<javac srcdir="src" destdir="bin"
classpath="../../Engine/SCommon/bin/SCommon.jar"
includeantruntime="false" />
</target>
<target name="csrc" depends="jar">
<echo message="int main(){return 42;}" file="bin/get42.c" />
</target>
<target name="exe" depends="csrc">
<exec dir="." executable="gcc">
<arg line="-o bin/get42 " />
<arg line="bin/get42.c" />
</exec>
</target>
<target name="jar" depends="java">
<jar destfile="execdepstorlet-1.0.jar" basedir="bin">
<manifest>
<attribute name="Main-Class"
value="org.openstack.storlet.ExecDepStorlet" />
</manifest>
</jar>
<move file="execdepstorlet-1.0.jar" todir="bin" />
</target>
<target name="text" depends="jar">
<echo message="Some junk content" file="bin/junk.txt" />
</target>
<target name="build" depends="clean,jar,exe,text">
</target>
<target name="clean">
<delete dir="bin" />
</target>
<target name="java">
<mkdir dir="bin" />
<javac srcdir="src" destdir="bin"
classpath="../../Engine/SCommon/bin/SCommon.jar"
includeantruntime="false" />
</target>
<target name="csrc" depends="jar">
<echo message="int main(){return 42;}" file="bin/get42.c" />
</target>
<target name="exe" depends="csrc">
<exec dir="." executable="gcc">
<arg line="-o bin/get42 " />
<arg line="bin/get42.c" />
</exec>
</target>
<target name="jar" depends="java">
<jar destfile="execdepstorlet-1.0.jar" basedir="bin">
<manifest>
<attribute name="Main-Class"
value="org.openstack.storlet.ExecDepStorlet" />
</manifest>
</jar>
<move file="execdepstorlet-1.0.jar" todir="bin" />
</target>
<target name="text" depends="jar">
<echo message="Some junk content" file="bin/junk.txt" />
</target>
<target name="build" depends="clean,jar,exe,text">
</target>
</project>

View File

@ -45,59 +45,59 @@ import org.openstack.storlet.common.StorletUtils;
* copied and "chmod"-ed.
* */
public class ExecDepStorlet implements IStorlet {
    private final int nExpectedReturnCode_ = 42;

    @Override
    public void invoke(ArrayList<StorletInputStream> inputStreams,
            ArrayList<StorletOutputStream> outputStreams,
            Map<String, String> arg2, StorletLogger log)
            throws StorletException {
        StorletInputStream sinob = null;
        StorletObjectOutputStream sout = null;
        try {
            String strContent = "...:::== Inside ExecDepStorlet ==:::...";
            String strTimeStamp = new SimpleDateFormat("dd-MM-yyy HH:mm:ss")
                    .format(new Date());
            log.emitLog(strContent);
            log.emitLog(strTimeStamp);

            sinob = inputStreams.get(0);
            HashMap<String, String> md = sinob.getMetadata();
            sout = (StorletObjectOutputStream) outputStreams.get(0);
            Iterator<Entry<String, String>> ii = md.entrySet().iterator();
            while (ii.hasNext()) {
                @SuppressWarnings("rawtypes")
                Map.Entry kv = (Map.Entry) ii.next();
                log.emitLog("[ " + kv.getKey() + " ] = " + kv.getValue());
            }
            // Get the source location of this class image
            String strJarPath = StorletUtils.getClassFolder(this.getClass());

            // Combine the invocation string
            String strExec = strJarPath + java.io.File.separator + "get42";
            log.emitLog("Exec = " + strExec);

            // Start process, wait for it to finish, get the exit code
            Process ExecProc = new ProcessBuilder(strExec).start();
            int nExitCode = ExecProc.waitFor();
            String strInvRes = "Exit code = " + nExitCode;
            md.put("depend-ret-code", "" + nExitCode);
            sout.setMetadata(md);
            log.emitLog(strInvRes);
        } catch (Exception e) {
            System.err.print("Exception: " + e.getMessage());
            log.emitLog("Exception: " + e.getMessage());
            throw new StorletException(e.getMessage());
        } finally {
            try {
                if (sinob != null)
                    sinob.getStream().close();
                if (sout != null) {
                    sout.getStream().close();
                    sout.getMDStream().close();
                }
            } catch (IOException e) {
            }
        }
    }
}

View File

@ -15,31 +15,31 @@
<project>
<target name="clean">
<delete dir="bin" />
</target>
<target name="clean">
<delete dir="bin" />
</target>
<target name="java">
<mkdir dir="bin" />
<javac srcdir="src" destdir="bin"
classpath="../../Engine/SCommon/bin/SCommon.jar"
includeantruntime="false" />
</target>
<target name="java">
<mkdir dir="bin" />
<javac srcdir="src" destdir="bin"
classpath="../../Engine/SCommon/bin/SCommon.jar"
includeantruntime="false" />
</target>
<target name="jar" depends="java">
<jar destfile="halfstorlet-1.0.jar" basedir="bin">
<manifest>
<attribute name="Main-Class"
value="org.openstack.storlet.HalfStorlet" />
</manifest>
</jar>
<move file="halfstorlet-1.0.jar" todir="bin" />
</target>
<target name="jar" depends="java">
<jar destfile="halfstorlet-1.0.jar" basedir="bin">
<manifest>
<attribute name="Main-Class"
value="org.openstack.storlet.HalfStorlet" />
</manifest>
</jar>
<move file="halfstorlet-1.0.jar" todir="bin" />
</target>
<target name="text" depends="jar">
<echo message="abcdefghijklmonp" file="bin/source.txt" />
</target>
<echo message="abcdefghijklmonp" file="bin/source.txt" />
</target>
<target name="build" depends="jar, text">
</target>
<target name="build" depends="jar, text">
</target>
</project>

View File

@ -36,51 +36,51 @@ import org.openstack.storlet.common.StorletObjectOutputStream;
import org.openstack.storlet.common.StorletOutputStream;
public class HalfStorlet implements IStorlet {
    @Override
    public void invoke(ArrayList<StorletInputStream> inputStreams,
            ArrayList<StorletOutputStream> outputStreams,
            Map<String, String> parameters, StorletLogger log)
            throws StorletException {
        log.emitLog("HalfStorlet Invoked");

        StorletInputStream sis = inputStreams.get(0);

        StorletObjectOutputStream storletObjectOutputStream;
        storletObjectOutputStream = (StorletObjectOutputStream) outputStreams
                .get(0);
        storletObjectOutputStream.setMetadata(sis.getMetadata());

        /*
         * Copy every other byte from input stream to output stream
         */
        log.emitLog("Copying every other byte");
        StorletInputStream psis = (StorletInputStream) inputStreams.get(0);
        InputStream is;
        is = psis.getStream();

        OutputStream os = storletObjectOutputStream.getStream();
        try {
            log.emitLog(new Date().toString() + "About to read from input");
            int a;
            boolean bool = true;
            while ((a = is.read()) != -1) {
                if (bool)
                    os.write(a);
                bool = !bool;
            }
        } catch (Exception e) {
            log.emitLog("Copying every other byte from input stream to output stream failed: "
                    + e.getMessage());
            throw new StorletException(
                    "Copying every other byte from input stream to output stream failed: "
                            + e.getMessage());
        } finally {
            try {
                is.close();
                os.close();
            } catch (IOException e) {
            }
        }
        log.emitLog("HalfStorlet Invocation done");
    }
}

View File

@ -15,42 +15,42 @@
<project>
<target name="clean">
<delete dir="bin" />
</target>
<target name="clean">
<delete dir="bin" />
</target>
<target name="java">
<mkdir dir="bin" />
<javac srcdir="src" destdir="bin"
classpath="../../Engine/SCommon/bin/SCommon.jar"
includeantruntime="false" />
</target>
<target name="java">
<mkdir dir="bin" />
<javac srcdir="src" destdir="bin"
classpath="../../Engine/SCommon/bin/SCommon.jar"
includeantruntime="false" />
</target>
<target name="csrc" depends="jar">
<echo message="int main(){return 42;}" file="bin/get42.c" />
</target>
<target name="exe" depends="csrc">
<exec dir="." executable="gcc">
<arg line="-o bin/get42 " />
<arg line="bin/get42.c" />
</exec>
</target>
<target name="csrc" depends="jar">
<echo message="int main(){return 42;}" file="bin/get42.c" />
</target>
<target name="exe" depends="csrc">
<exec dir="." executable="gcc">
<arg line="-o bin/get42 " />
<arg line="bin/get42.c" />
</exec>
</target>
<target name="jar" depends="java">
<jar destfile="identitystorlet-1.0.jar" basedir="bin">
<manifest>
<attribute name="Main-Class"
value="org.openstack.storlet.IdentityStorlet" />
</manifest>
</jar>
<move file="identitystorlet-1.0.jar" todir="bin" />
</target>
<target name="jar" depends="java">
<jar destfile="identitystorlet-1.0.jar" basedir="bin">
<manifest>
<attribute name="Main-Class"
value="org.openstack.storlet.IdentityStorlet" />
</manifest>
</jar>
<move file="identitystorlet-1.0.jar" todir="bin" />
</target>
<target name="text" depends="jar">
<echo message="Some content to copy" file="bin/source.txt" />
</target>
<target name="text" depends="jar">
<echo message="Some content to copy" file="bin/source.txt" />
</target>
<target name="build" depends="clean, jar,exe,text">
</target>
<target name="build" depends="clean, jar,exe,text">
</target>
</project>


@ -39,174 +39,174 @@ import org.openstack.storlet.common.StorletOutputStream;
import org.openstack.storlet.common.StorletUtils;
public class IdentityStorlet implements IStorlet {
@Override
public void invoke(ArrayList<StorletInputStream> inputStreams,
ArrayList<StorletOutputStream> outputStreams,
Map<String, String> parameters, StorletLogger log)
throws StorletException {
log.emitLog("IdentityStorlet Invoked");
@Override
public void invoke(ArrayList<StorletInputStream> inputStreams,
ArrayList<StorletOutputStream> outputStreams,
Map<String, String> parameters, StorletLogger log)
throws StorletException {
log.emitLog("IdentityStorlet Invoked");
/*
* Copy metadata into out md
*/
HashMap<String, String> md = new HashMap<String, String>();
HashMap<String, String> object_md;
Iterator it;
StorletInputStream sis = inputStreams.get(0);
object_md = sis.getMetadata();
it = object_md.entrySet().iterator();
while (it.hasNext()) {
Map.Entry pairs = (Map.Entry) it.next();
log.emitLog("Putting metadata " + (String) pairs.getKey() + "="
+ (String) pairs.getValue());
md.put((String) pairs.getKey(), (String) pairs.getValue());
}
/*
* Copy metadata into out md
*/
HashMap<String, String> md = new HashMap<String, String>();
HashMap<String, String> object_md;
Iterator it;
StorletInputStream sis = inputStreams.get(0);
object_md = sis.getMetadata();
it = object_md.entrySet().iterator();
while (it.hasNext()) {
Map.Entry pairs = (Map.Entry) it.next();
log.emitLog("Putting metadata " + (String) pairs.getKey() + "="
+ (String) pairs.getValue());
md.put((String) pairs.getKey(), (String) pairs.getValue());
}
/*
* Get optional execute flag
*/
String strExecute = new String("false");
if (parameters.get("execute") != null) {
strExecute = parameters.get("execute");
}
boolean bExecute = Boolean.parseBoolean(strExecute);
int nExitCode = -1;
/*
* Execute
*/
if (bExecute == true) {
String strJarPath = StorletUtils.getClassFolder(this.getClass());
/*
* Get optional execute flag
*/
String strExecute = new String("false");
if (parameters.get("execute") != null) {
strExecute = parameters.get("execute");
}
boolean bExecute = Boolean.parseBoolean(strExecute);
int nExitCode = -1;
/*
* Execute
*/
if (bExecute == true) {
String strJarPath = StorletUtils.getClassFolder(this.getClass());
// Combine the invocation string
String strExec = strJarPath + java.io.File.separator + "get42";
log.emitLog("Exec = " + strExec);
try {
// Start process, wait for it to finish, get the exit code
Process ExecProc = new ProcessBuilder(strExec).start();
nExitCode = ExecProc.waitFor();
log.emitLog("Exit code = " + nExitCode);
} catch (Exception e) {
log.emitLog("Execution failed. Got Exception " + e.getMessage());
}
}
// Combine the invocation string
String strExec = strJarPath + java.io.File.separator + "get42";
log.emitLog("Exec = " + strExec);
try {
// Start process, wait for it to finish, get the exit code
Process ExecProc = new ProcessBuilder(strExec).start();
nExitCode = ExecProc.waitFor();
log.emitLog("Exit code = " + nExitCode);
} catch (Exception e) {
log.emitLog("Execution failed. Got Exception " + e.getMessage());
}
}
/*
* Get optional chunk size
*/
String strChunkSize = "65536";
if (parameters.get("chunk_size") != null) {
strChunkSize = parameters.get("chunk_size");
}
int iChunkSize;
try {
iChunkSize = Integer.parseInt(strChunkSize);
} catch (NumberFormatException e) {
log.emitLog("The chunk_size parameter is not an integer");
throw new StorletException(
"The chunk_size parameter is not an integer");
}
/*
* Get optional chunk size
*/
String strChunkSize = "65536";
if (parameters.get("chunk_size") != null) {
strChunkSize = parameters.get("chunk_size");
}
int iChunkSize;
try {
iChunkSize = Integer.parseInt(strChunkSize);
} catch (NumberFormatException e) {
log.emitLog("The chunk_size parameter is not an integer");
throw new StorletException(
"The chunk_size parameter is not an integer");
}
/*
* 1) If the output stream is StorletObjectOutputStream we are in a GET
* or PUT scenario where we copy the data and metadata into it. 2) If
* the output stream is StorletContainerHandle we are in a Storlet batch
* scenario where we first ask for a StorletObjectOutputStream, and then
* do the copy.
*/
StorletObjectOutputStream storletObjectOutputStream;
StorletOutputStream storletOutputStream = outputStreams.get(0);
if (storletOutputStream instanceof StorletContainerHandle) {
log.emitLog("Requesting for output object");
StorletContainerHandle storletContainerHandle = (StorletContainerHandle) storletOutputStream;
String objectName = new String(storletContainerHandle.getName()
+ "/copy_target");
storletObjectOutputStream = storletContainerHandle
.getObjectOutputStream(objectName);
storletContainerHandle.close();
} else {
storletObjectOutputStream = (StorletObjectOutputStream) outputStreams
.get(0);
}
/*
* 1) If the output stream is StorletObjectOutputStream we are in a GET
* or PUT scenario where we copy the data and metadata into it. 2) If
* the output stream is StorletContainerHandle we are in a Storlet batch
* scenario where we first ask for a StorletObjectOutputStream, and then
* do the copy.
*/
StorletObjectOutputStream storletObjectOutputStream;
StorletOutputStream storletOutputStream = outputStreams.get(0);
if (storletOutputStream instanceof StorletContainerHandle) {
log.emitLog("Requesting for output object");
StorletContainerHandle storletContainerHandle = (StorletContainerHandle) storletOutputStream;
String objectName = new String(storletContainerHandle.getName()
+ "/copy_target");
storletObjectOutputStream = storletContainerHandle
.getObjectOutputStream(objectName);
storletContainerHandle.close();
} else {
storletObjectOutputStream = (StorletObjectOutputStream) outputStreams
.get(0);
}
/*
* add execution invocation result to out md
*/
if (bExecute == true) {
md.put("Execution result", Integer.toString(nExitCode));
}
/*
* Copy parameters into out md
*/
it = parameters.entrySet().iterator();
while (it.hasNext()) {
Map.Entry pairs = (Map.Entry) it.next();
log.emitLog("Putting parameter " + (String) pairs.getKey() + "="
+ (String) pairs.getValue());
md.put("Parameter-" + (String) pairs.getKey(),
(String) pairs.getValue());
}
/*
* add execution invocation result to out md
*/
if (bExecute == true) {
md.put("Execution result", Integer.toString(nExitCode));
}
/*
* Copy parameters into out md
*/
it = parameters.entrySet().iterator();
while (it.hasNext()) {
Map.Entry pairs = (Map.Entry) it.next();
log.emitLog("Putting parameter " + (String) pairs.getKey() + "="
+ (String) pairs.getValue());
md.put("Parameter-" + (String) pairs.getKey(),
(String) pairs.getValue());
}
/*
* Now set the output metadata
*/
log.emitLog("Setting metadata");
storletObjectOutputStream.setMetadata(md);
/*
* Now set the output metadata
*/
log.emitLog("Setting metadata");
storletObjectOutputStream.setMetadata(md);
/*
* Get optional double flag
*/
String strDouble = new String("false");
if (parameters.get("double") != null) {
strDouble = parameters.get("double");
}
boolean bDouble = Boolean.parseBoolean(strDouble);
log.emitLog("bDouble is " + bDouble);
/*
* Get optional double flag
*/
String strDouble = new String("false");
if (parameters.get("double") != null) {
strDouble = parameters.get("double");
}
boolean bDouble = Boolean.parseBoolean(strDouble);
log.emitLog("bDouble is " + bDouble);
/*
* Copy data from input stream to output stream
*/
log.emitLog("Copying data");
StorletInputStream psis = (StorletInputStream) inputStreams.get(0);
InputStream is;
is = psis.getStream();
/*
* Copy data from input stream to output stream
*/
log.emitLog("Copying data");
StorletInputStream psis = (StorletInputStream) inputStreams.get(0);
InputStream is;
is = psis.getStream();
OutputStream os = storletObjectOutputStream.getStream();
final byte[] buffer = new byte[iChunkSize];
String readString = null;
try {
log.emitLog(new Date().toString() + "About to read from input");
for (int bytes_read = is.read(buffer); bytes_read >= 0; bytes_read = is
.read(buffer)) {
log.emitLog(new Date().toString() + "read from input "
+ bytes_read + "bytes");
readString = new String(buffer);
readString = readString.replaceAll("\0", "");
log.emitLog(new Date().toString() + "Writing to output "
+ bytes_read + "bytes");
os.write(readString.getBytes(), 0, bytes_read);
if (bDouble == true) {
log.emitLog("bDouble == true writing again");
log.emitLog(new Date().toString() + "Writing to output "
+ bytes_read + "bytes");
// os.write(buffer);
os.write(readString.getBytes());
}
log.emitLog("About to read from input");
}
os.close();
} catch (Exception e) {
log.emitLog("Copying data from input stream to output stream failed: "
+ e.getMessage());
throw new StorletException(
"Copying data from input stream to output stream failed: "
+ e.getMessage());
} finally {
try {
is.close();
os.close();
} catch (IOException e) {
}
}
log.emitLog("IdentityStorlet Invocation done");
}
OutputStream os = storletObjectOutputStream.getStream();
final byte[] buffer = new byte[iChunkSize];
String readString = null;
try {
log.emitLog(new Date().toString() + "About to read from input");
for (int bytes_read = is.read(buffer); bytes_read >= 0; bytes_read = is
.read(buffer)) {
log.emitLog(new Date().toString() + "read from input "
+ bytes_read + "bytes");
readString = new String(buffer);
readString = readString.replaceAll("\0", "");
log.emitLog(new Date().toString() + "Writing to output "
+ bytes_read + "bytes");
os.write(readString.getBytes(), 0, bytes_read);
if (bDouble == true) {
log.emitLog("bDouble == true writing again");
log.emitLog(new Date().toString() + "Writing to output "
+ bytes_read + "bytes");
// os.write(buffer);
os.write(readString.getBytes());
}
log.emitLog("About to read from input");
}
os.close();
} catch (Exception e) {
log.emitLog("Copying data from input stream to output stream failed: "
+ e.getMessage());
throw new StorletException(
"Copying data from input stream to output stream failed: "
+ e.getMessage());
} finally {
try {
is.close();
os.close();
} catch (IOException e) {
}
}
log.emitLog("IdentityStorlet Invocation done");
}
}
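As the parameter handling above suggests, the optional execute, double and chunk_size parameters are passed on the query string of the request. A minimal python-swiftclient sketch; the auth values, container and object name are the sample values from the invocation scripts later in this document::

    from swiftclient import client as c

    url, token = c.get_auth('http://127.0.0.1:5000/v2.0', 'service:swift', 'passw0rd',
                            os_options={'tenant_name': 'service'}, auth_version='2.0')
    # execute=true runs the bundled get42 binary; its exit code comes back as the
    # 'x-object-meta-execution result' response header.
    headers, body = c.get_object(url, token, 'myobjects', 'source.txt',
                                 query_string='execute=true&double=false&chunk_size=65536',
                                 headers={'X-Run-Storlet': 'identitystorlet-1.0.jar'})
    print(headers.get('x-object-meta-execution result'))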


@ -16,31 +16,31 @@
<project>
<target name="clean">
<delete dir="bin" />
</target>
<target name="clean">
<delete dir="bin" />
</target>
<target name="java">
<mkdir dir="bin" />
<javac srcdir="src" destdir="bin"
classpath="../../Engine/SCommon/bin/SCommon.jar"
includeantruntime="false" />
</target>
<target name="java">
<mkdir dir="bin" />
<javac srcdir="src" destdir="bin"
classpath="../../Engine/SCommon/bin/SCommon.jar"
includeantruntime="false" />
</target>
<target name="jar" depends="java">
<jar destfile="partitionsidentitystorlet-1.0.jar" basedir="bin">
<manifest>
<attribute name="Main-Class"
value="org.openstack.storlet.PartitionsIdentityStorlet" />
</manifest>
</jar>
<move file="partitionsidentitystorlet-1.0.jar" todir="bin" />
</target>
<target name="jar" depends="java">
<jar destfile="partitionsidentitystorlet-1.0.jar" basedir="bin">
<manifest>
<attribute name="Main-Class"
value="org.openstack.storlet.PartitionsIdentityStorlet" />
</manifest>
</jar>
<move file="partitionsidentitystorlet-1.0.jar" todir="bin" />
</target>
<target name="text">
<copy file="records.txt" toFile="bin/records.txt" />
</target>
<target name="text">
<copy file="records.txt" toFile="bin/records.txt" />
</target>
<target name="build" depends="clean, jar, text">
</target>
<target name="build" depends="clean, jar, text">
</target>
</project>


@ -66,149 +66,149 @@ import org.openstack.storlet.common.StorletUtils;
* described in (1)
*/
public class PartitionsIdentityStorlet implements IStorlet {
long m_start, m_end, m_length;
int m_max_record_line;
boolean m_firstPartition;
BufferedReader m_br = null;
long m_start, m_end, m_length;
int m_max_record_line;
boolean m_firstPartition;
BufferedReader m_br = null;
long m_total_lines_emitted = 0;
StorletLogger m_log;
StorletLogger m_log;
private void safeClose(OutputStream os, InputStream is) {
try {
if (m_br != null) m_br.close();
} catch (IOException e) {
}
try {
if (os != null) os.close();
} catch (IOException e) {
}
try {
if (is != null) is.close();
} catch (IOException e) {
}
}
private void safeClose(OutputStream os, InputStream is) {
try {
if (m_br != null) m_br.close();
} catch (IOException e) {
}
try {
if (os != null) os.close();
} catch (IOException e) {
}
try {
if (is != null) is.close();
} catch (IOException e) {
}
}
private void parseInputParameters(Map<String, String> parameters) throws Exception {
if (parameters.get("start") != null) {
m_start = Long.parseLong(parameters.get("start"));
} else {
m_log.emitLog("Missing mandatory start parameter");
throw new Exception("Missing mandatory start parameter");
}
if (parameters.get("end") != null) {
m_end = Long.parseLong(parameters.get("end"));
} else {
m_log.emitLog("Missing mandatory end parameter");
throw new Exception("Missing mandatory end parameter");
}
if (parameters.get("max_record_line") != null) {
m_max_record_line = Integer.parseInt(parameters.get("max_record_line"));
} else {
m_log.emitLog("Missing mandatory max_record_line parameter");
throw new Exception("Missing mandatory max_record_line parameter");
}
if (parameters.get("first_partition") != null) {
m_firstPartition = Boolean.parseBoolean(parameters.get("first_partition"));
} else {
m_log.emitLog("Missing mandatory first_partition parameter");
throw new Exception("Missing mandatory first_partition parameter");
}
m_length = m_end - m_start;
}
private void parseInputParameters(Map<String, String> parameters) throws Exception {
if (parameters.get("start") != null) {
m_start = Long.parseLong(parameters.get("start"));
} else {
m_log.emitLog("Missing mandatory start parameter");
throw new Exception("Missing mandatory start parameter");
}
if (parameters.get("end") != null) {
m_end = Long.parseLong(parameters.get("end"));
} else {
m_log.emitLog("Missing mandatory end parameter");
throw new Exception("Missing mandatory end parameter");
}
if (parameters.get("max_record_line") != null) {
m_max_record_line = Integer.parseInt(parameters.get("max_record_line"));
} else {
m_log.emitLog("Missing mandatory max_record_line parameter");
throw new Exception("Missing mandatory max_record_line parameter");
}
if (parameters.get("first_partition") != null) {
m_firstPartition = Boolean.parseBoolean(parameters.get("first_partition"));
} else {
m_log.emitLog("Missing mandatory first_partition parameter");
throw new Exception("Missing mandatory first_partition parameter");
}
m_length = m_end - m_start;
}
private int consumeFirstLine(OutputStream os) throws IOException {
String line;
line = m_br.readLine();
if (line == null) {
m_log.emitLog("m_br fully consumed on first line");
throw new IOException("m_br fully consumed on first line");
}
if (m_firstPartition == true) {
private int consumeFirstLine(OutputStream os) throws IOException {
String line;
line = m_br.readLine();
if (line == null) {
m_log.emitLog("m_br fully consumed on first line");
throw new IOException("m_br fully consumed on first line");
}
if (m_firstPartition == true) {
m_total_lines_emitted += 1;
//m_log.emitLog("This is the first partition, writing first line");
//m_log.emitLog("wrote: " + new String(line.getBytes(),"UTF-8") + "\n");
os.write(line.getBytes());
os.write('\n');
} else {
//m_log.emitLog("This is NOT the first partition, skipping first line");
}
//m_log.emitLog("This is the first partition, writing first line");
//m_log.emitLog("wrote: " + new String(line.getBytes(),"UTF-8") + "\n");
os.write(line.getBytes());
os.write('\n');
} else {
//m_log.emitLog("This is NOT the first partition, skipping first line");
}
return line.length() + 1;
}
return line.length() + 1;
}
@Override
public void invoke(ArrayList<StorletInputStream> inputStreams,
ArrayList<StorletOutputStream> outputStreams,
Map<String, String> parameters, StorletLogger log)
throws StorletException {
m_log = log;
m_log.emitLog("PartitionsIdentityStorlet Invoked");
@Override
public void invoke(ArrayList<StorletInputStream> inputStreams,
ArrayList<StorletOutputStream> outputStreams,
Map<String, String> parameters, StorletLogger log)
throws StorletException {
m_log = log;
m_log.emitLog("PartitionsIdentityStorlet Invoked");
StorletObjectOutputStream sos = null;
OutputStream os = null;
InputStream is = null;
try {
sos = (StorletObjectOutputStream)outputStreams.get(0);
sos.setMetadata(new HashMap<String, String>());
os = sos.getStream();
is = inputStreams.get(0).getStream();
} catch (Exception ex) {
m_log.emitLog("Failed to get streams from Storlet invoke inputs");
safeClose(os, is);
sos = (StorletObjectOutputStream)outputStreams.get(0);
sos.setMetadata(new HashMap<String, String>());
os = sos.getStream();
is = inputStreams.get(0).getStream();
} catch (Exception ex) {
m_log.emitLog("Failed to get streams from Storlet invoke inputs");
safeClose(os, is);
return;
}
}
/*
* Get mandatory parameters
*/
try {
parseInputParameters(parameters);
} catch (Exception ex) {
m_log.emitLog("Failed to initialize input stream");
safeClose(os, is);
/*
* Get mandatory parameters
*/
try {
parseInputParameters(parameters);
} catch (Exception ex) {
m_log.emitLog("Failed to initialize input stream");
safeClose(os, is);
return;
}
}
String line;
int lineLength = 0;
try {
m_br = new BufferedReader(new InputStreamReader(is));
} catch (Exception ex) {
m_log.emitLog("Failed to initialize input stream");
String line;
int lineLength = 0;
try {
m_br = new BufferedReader(new InputStreamReader(is));
} catch (Exception ex) {
m_log.emitLog("Failed to initialize input stream");
safeClose(os, is);
return;
}
try {
lineLength = consumeFirstLine(os);
} catch (Exception ex) {
m_log.emitLog("Failed to consume first line");
lineLength = consumeFirstLine(os);
} catch (Exception ex) {
m_log.emitLog("Failed to consume first line");
safeClose(os, is);
return;
}
m_length -= lineLength;
try {
// We allow m_length to get to -1 so as to read an extra record
// if m_end points exactly to an end of a record.
while ( ((line = m_br.readLine()) != null) && (m_length >= -1) ) {
m_length -= lineLength;
try {
// We allow m_length to get to -1 so as to read an extra record
// if m_end points exactly to an end of a record.
while ( ((line = m_br.readLine()) != null) && (m_length >= -1) ) {
m_total_lines_emitted += 1;
os.write(line.getBytes());
os.write('\n');
//m_log.emitLog("m_length is " + m_length);
//m_log.emitLog("wrote: " + new String(line.getBytes(),"UTF-8") + "\n");
m_length -= (line.length() + 1);
}
os.write(line.getBytes());
os.write('\n');
//m_log.emitLog("m_length is " + m_length);
//m_log.emitLog("wrote: " + new String(line.getBytes(),"UTF-8") + "\n");
m_length -= (line.length() + 1);
}
if (m_length > 0)
m_log.emitLog("Got a null line while not consuming all range");
m_log.emitLog("Got a null line while not consuming all range");
} catch (Exception ex) {
m_log.emitLog("Exception while consuming range " + Arrays.toString(ex.getStackTrace()) );
} finally {
} catch (Exception ex) {
m_log.emitLog("Exception while consuming range " + Arrays.toString(ex.getStackTrace()) );
} finally {
safeClose(os, is);
}
}
m_log.emitLog("Total lines emitted: " + m_total_lines_emitted);
}
}
}
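All four parameters parsed above (start, end, max_record_line, first_partition) are mandatory and are passed on the query string. A minimal sketch; the byte range, record length, container and object name are illustrative only::

    from swiftclient import client as c

    url, token = c.get_auth('http://127.0.0.1:5000/v2.0', 'service:swift', 'passw0rd',
                            os_options={'tenant_name': 'service'}, auth_version='2.0')
    # Emit the records that fall inside bytes [0, 1024) of the object.
    headers, body = c.get_object(url, token, 'myobjects', 'records.txt',
                                 query_string='start=0&end=1024&max_record_line=80&first_partition=true',
                                 headers={'X-Run-Storlet': 'partitionsidentitystorlet-1.0.jar'})
    print(body)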


@ -14,33 +14,27 @@
-->
<project>
<target name="common">
<mkdir dir="bin" />
</target>
<target name="testmetadatastorlet">
<javac srcdir="src/org/openstack/storlet/testmetadatastorlet" destdir="bin" includeantruntime="false">
<classpath>
<pathelement
path="../../Engine/SCommon/bin/SCommon.jar"/>
</classpath>
</javac>
<jar destfile="bin/testmetadatastorlet-1.0.jar"
basedir="bin"
includes="org/openstack/storlet/testmetadatastorlet/*">
</jar>
</target>
<target name="text">
<echo message="Some content to copy" file="bin/source.txt" />
</target>
<target name="clean">
<delete dir="bin" />
</target>
<target name="build" depends="clean, common, text, testmetadatastorlet"/>
<target name="common">
<mkdir dir="bin" />
</target>
<target name="testmetadatastorlet">
<javac srcdir="src/org/openstack/storlet/testmetadatastorlet" destdir="bin" includeantruntime="false">
<classpath>
<pathelement
path="../../Engine/SCommon/bin/SCommon.jar"/>
</classpath>
</javac>
<jar destfile="bin/testmetadatastorlet-1.0.jar"
basedir="bin"
includes="org/openstack/storlet/testmetadatastorlet/*">
</jar>
</target>
<target name="text">
<echo message="Some content to copy" file="bin/source.txt" />
</target>
<target name="clean">
<delete dir="bin" />
</target>
<target name="build" depends="clean, common, text, testmetadatastorlet"/>
</project>


@ -32,46 +32,46 @@ import org.openstack.storlet.common.StorletObjectOutputStream;
import org.openstack.storlet.common.StorletOutputStream;
public class MetadataStorlet implements IStorlet {
@Override
public void invoke(ArrayList<StorletInputStream> inputStreams,
ArrayList<StorletOutputStream> outputStreams,
Map<String, String> parameters, StorletLogger log)
throws StorletException {
log.emitLog("Test Metadata Storlet Invoked");
final InputStream inputStream = inputStreams.get(0).getStream();
final HashMap<String, String> metadata = inputStreams.get(0)
.getMetadata();
final StorletObjectOutputStream storletObjectOutputStream = (StorletObjectOutputStream) outputStreams
.get(0);
Iterator it = metadata.entrySet().iterator();
log.emitLog("Printing the input metadata");
while (it.hasNext()) {
Map.Entry pairs = (Map.Entry) it.next();
log.emitLog((String) pairs.getKey() + " : "
+ (String) pairs.getValue());
}
@Override
public void invoke(ArrayList<StorletInputStream> inputStreams,
ArrayList<StorletOutputStream> outputStreams,
Map<String, String> parameters, StorletLogger log)
throws StorletException {
log.emitLog("Test Metadata Storlet Invoked");
final InputStream inputStream = inputStreams.get(0).getStream();
final HashMap<String, String> metadata = inputStreams.get(0)
.getMetadata();
final StorletObjectOutputStream storletObjectOutputStream = (StorletObjectOutputStream) outputStreams
.get(0);
Iterator it = metadata.entrySet().iterator();
log.emitLog("Printing the input metadata");
while (it.hasNext()) {
Map.Entry pairs = (Map.Entry) it.next();
log.emitLog((String) pairs.getKey() + " : "
+ (String) pairs.getValue());
}
metadata.put("override_key", "new_value");
it = metadata.entrySet().iterator();
log.emitLog("Printing the input metadata");
while (it.hasNext()) {
Map.Entry pairs = (Map.Entry) it.next();
log.emitLog((String) pairs.getKey() + " : "
+ (String) pairs.getValue());
}
storletObjectOutputStream.setMetadata(metadata);
metadata.put("override_key", "new_value");
it = metadata.entrySet().iterator();
log.emitLog("Printing the input metadata");
while (it.hasNext()) {
Map.Entry pairs = (Map.Entry) it.next();
log.emitLog((String) pairs.getKey() + " : "
+ (String) pairs.getValue());
}
storletObjectOutputStream.setMetadata(metadata);
OutputStream outputStream = storletObjectOutputStream.getStream();
try {
byte[] bytearray = new byte[100];
inputStream.read(bytearray, 0, 100);
OutputStream outputStream = storletObjectOutputStream.getStream();
try {
byte[] bytearray = new byte[100];
inputStream.read(bytearray, 0, 100);
outputStream.write("1234567890".getBytes());
inputStream.close();
outputStream.close();
} catch (IOException ex) {
log.emitLog(ex.getMessage());
throw new StorletException(ex.getMessage());
}
}
outputStream.write("1234567890".getBytes());
inputStream.close();
outputStream.close();
} catch (IOException ex) {
log.emitLog(ex.getMessage());
throw new StorletException(ex.getMessage());
}
}
}


@ -14,29 +14,23 @@
-->
<project>
<target name="common">
<mkdir dir="bin" />
</target>
<target name="test">
<javac srcdir="src/org/openstack/storlet/test" destdir="bin" includeantruntime="false">
<classpath>
<pathelement
path="../../Engine/SCommon/bin/SCommon.jar"/>
</classpath>
</javac>
<jar destfile="bin/test-10.jar"
basedir="bin"
includes="org/openstack/storlet/test/*">
</jar>
</target>
<target name="clean">
<delete dir="bin" />
</target>
<target name="build" depends="clean, common, test"/>
<target name="common">
<mkdir dir="bin" />
</target>
<target name="test">
<javac srcdir="src/org/openstack/storlet/test" destdir="bin" includeantruntime="false">
<classpath>
<pathelement
path="../../Engine/SCommon/bin/SCommon.jar"/>
</classpath>
</javac>
<jar destfile="bin/test-10.jar"
basedir="bin"
includes="org/openstack/storlet/test/*">
</jar>
</target>
<target name="clean">
<delete dir="bin" />
</target>
<target name="build" depends="clean, common, test"/>
</project>


@ -34,73 +34,73 @@ import org.openstack.storlet.common.StorletOutputStream;
import org.openstack.storlet.common.StorletObjectOutputStream;
public class test1 implements IStorlet {
/***
* Storlet invoke method.
*
* @throws InterruptedException
*/
@Override
public void invoke(ArrayList<StorletInputStream> inputStreams,
ArrayList<StorletOutputStream> outputStreams,
Map<String, String> params, StorletLogger logger)
throws StorletException {
try {
logger.emitLog("In test invoke!");
logger.emitLog("Iterating over params");
for (Map.Entry<String, String> entry : params.entrySet()) {
String key = entry.getKey();
String value = entry.getValue();
logger.emitLog(key + ":" + value);
}
StorletInputStream sins = inputStreams.get(0);
HashMap<String, String> md = sins.getMetadata();
StorletObjectOutputStream outStream = (StorletObjectOutputStream) outputStreams
.get(0);
outStream.setMetadata(md);
OutputStream stream = outStream.getStream();
logger.emitLog("About to get param op");
String op = params.get("op");
if (op == null) {
logger.emitLog("No op raising...");
throw new StorletException("no op in params");
}
logger.emitLog("Got op " + op);
if (op.equals("print")) {
logger.emitLog("op = print");
String key;
String value;
String s;
for (Map.Entry<String, String> entry : params.entrySet()) {
key = entry.getKey();
stream.write(key.getBytes());
s = " ";
stream.write(s.getBytes());
value = entry.getValue();
stream.write(value.getBytes());
s = "\n";
stream.write(s.getBytes());
}
stream.close();
return;
}
/***
* Storlet invoke method.
*
* @throws InterruptedException
*/
@Override
public void invoke(ArrayList<StorletInputStream> inputStreams,
ArrayList<StorletOutputStream> outputStreams,
Map<String, String> params, StorletLogger logger)
throws StorletException {
try {
logger.emitLog("In test invoke!");
logger.emitLog("Iterating over params");
for (Map.Entry<String, String> entry : params.entrySet()) {
String key = entry.getKey();
String value = entry.getValue();
logger.emitLog(key + ":" + value);
}
StorletInputStream sins = inputStreams.get(0);
HashMap<String, String> md = sins.getMetadata();
StorletObjectOutputStream outStream = (StorletObjectOutputStream) outputStreams
.get(0);
outStream.setMetadata(md);
OutputStream stream = outStream.getStream();
logger.emitLog("About to get param op");
String op = params.get("op");
if (op == null) {
logger.emitLog("No op raising...");
throw new StorletException("no op in params");
}
logger.emitLog("Got op " + op);
if (op.equals("print")) {
logger.emitLog("op = print");
String key;
String value;
String s;
for (Map.Entry<String, String> entry : params.entrySet()) {
key = entry.getKey();
stream.write(key.getBytes());
s = " ";
stream.write(s.getBytes());
value = entry.getValue();
stream.write(value.getBytes());
s = "\n";
stream.write(s.getBytes());
}
stream.close();
return;
}
if (op.equals("crash")) {
InputStream a = null;
a.close();
return;
}
if (op.equals("crash")) {
InputStream a = null;
a.close();
return;
}
if (op.equals("hold")) {
Thread.sleep(100000);
}
outStream.getStream().close();
if (op.equals("hold")) {
Thread.sleep(100000);
}
outStream.getStream().close();
} catch (IOException e) {
logger.emitLog(e.getMessage());
throw new StorletException(e.getMessage());
} catch (InterruptedException e) {
logger.emitLog(e.getMessage());
throw new StorletException(e.getMessage());
}
}
} catch (IOException e) {
logger.emitLog(e.getMessage());
throw new StorletException(e.getMessage());
} catch (InterruptedException e) {
logger.emitLog(e.getMessage());
throw new StorletException(e.getMessage());
}
}
}
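The op parameter selects the behaviour exercised above (print, crash or hold). A minimal sketch of the print case; auth values, container and object name are illustrative::

    from swiftclient import client as c

    url, token = c.get_auth('http://127.0.0.1:5000/v2.0', 'service:swift', 'passw0rd',
                            os_options={'tenant_name': 'service'}, auth_version='2.0')
    # op=print makes the storlet echo its invocation parameters into the output object.
    headers, body = c.get_object(url, token, 'myobjects', 'source.txt',
                                 query_string='op=print',
                                 headers={'X-Run-Storlet': 'test-10.jar'})
    print(body)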


@ -15,33 +15,33 @@
<project>
<target name="clean">
<delete dir="bin" />
</target>
<target name="clean">
<delete dir="bin" />
</target>
<target name="java">
<mkdir dir="bin" />
<javac srcdir="src" destdir="bin" includeantruntime="false">
<classpath>
<pathelement path="../../Engine/SCommon/bin/SCommon.jar"/>
</classpath>
</javac>
</target>
<target name="java">
<mkdir dir="bin" />
<javac srcdir="src" destdir="bin" includeantruntime="false">
<classpath>
<pathelement path="../../Engine/SCommon/bin/SCommon.jar"/>
</classpath>
</javac>
</target>
<target name="jar" depends="java">
<jar destfile="thumbnail-1.0.jar" basedir="bin">
<manifest>
<attribute name="Main-Class"
value="org.openstack.storlet.Thumbnail" />
</manifest>
</jar>
<move file="thumbnail-1.0.jar" todir="bin" />
</target>
<target name="jar" depends="java">
<jar destfile="thumbnail-1.0.jar" basedir="bin">
<manifest>
<attribute name="Main-Class"
value="org.openstack.storlet.Thumbnail" />
</manifest>
</jar>
<move file="thumbnail-1.0.jar" todir="bin" />
</target>
<target name="jpg">
<copy file="sample.jpg" toFile="bin/sample.jpg" />
</target>
<target name="jpg">
<copy file="sample.jpg" toFile="bin/sample.jpg" />
</target>
<target name="build" depends="jar,jpg">
</target>
<target name="build" depends="jar,jpg">
</target>
</project>


@ -46,91 +46,91 @@ import java.awt.Graphics2D;
import java.awt.RenderingHints;
public class ThumbnailStorlet implements IStorlet {
@Override
public void invoke(ArrayList<StorletInputStream> inputStreams,
ArrayList<StorletOutputStream> outputStreams,
Map<String, String> parameters, StorletLogger log)
throws StorletException {
log.emitLog("ThumbnailStorlet Invoked");
@Override
public void invoke(ArrayList<StorletInputStream> inputStreams,
ArrayList<StorletOutputStream> outputStreams,
Map<String, String> parameters, StorletLogger log)
throws StorletException {
log.emitLog("ThumbnailStorlet Invoked");
/*
* Get input stuff
*/
HashMap<String, String> object_md;
StorletInputStream storletInputStream = inputStreams.get(0);
/*
* Get input stuff
*/
HashMap<String, String> object_md;
StorletInputStream storletInputStream = inputStreams.get(0);
InputStream thumbnailInputStream = storletInputStream.getStream();
object_md = storletInputStream.getMetadata();
/*
* Get output stuff
*/
object_md = storletInputStream.getMetadata();
/*
* Get output stuff
*/
StorletObjectOutputStream storletObjectOutputStream = (StorletObjectOutputStream)outputStreams.get(0);
StorletObjectOutputStream storletObjectOutputStream = (StorletObjectOutputStream)outputStreams.get(0);
OutputStream thumbnailOutputStream = storletObjectOutputStream.getStream();
/*
* Set the output metadata
*/
log.emitLog("Setting metadata");
storletObjectOutputStream.setMetadata(object_md);
/*
* Set the output metadata
*/
log.emitLog("Setting metadata");
storletObjectOutputStream.setMetadata(object_md);
/*
* Read Input to BufferedImage
*/
log.emitLog("Reading Input");
BufferedImage img = null;
/*
* Read Input to BufferedImage
*/
log.emitLog("Reading Input");
BufferedImage img = null;
try {
img = ImageIO.read(thumbnailInputStream);
} catch (Exception e) {
log.emitLog("Failed to read input stream to buffered image");
throw new StorletException("Failed to read input stream to buffered image " + e.getMessage());
} finally {
try {
thumbnailInputStream.close();
} catch (IOException e) {
log.emitLog("Failed to close input stream");
}
}
try {
thumbnailInputStream.close();
} catch (IOException e) {
log.emitLog("Failed to close input stream");
}
img = ImageIO.read(thumbnailInputStream);
} catch (Exception e) {
log.emitLog("Failed to read input stream to buffered image");
throw new StorletException("Failed to read input stream to buffered image " + e.getMessage());
} finally {
try {
thumbnailInputStream.close();
} catch (IOException e) {
log.emitLog("Failed to close input stream");
}
}
try {
thumbnailInputStream.close();
} catch (IOException e) {
log.emitLog("Failed to close input stream");
}
/*
* Convert
*/
log.emitLog("Converting");
int newH = img.getHeight()/8;
int newW = img.getWidth()/8;
/*
* Convert
*/
log.emitLog("Converting");
int newH = img.getHeight()/8;
int newW = img.getWidth()/8;
int type = img.getTransparency() == Transparency.OPAQUE ? BufferedImage.TYPE_INT_RGB : BufferedImage.TYPE_INT_ARGB;
BufferedImage thumbnailImage = new BufferedImage(newW, newH, type);
BufferedImage thumbnailImage = new BufferedImage(newW, newH, type);
Graphics2D g = thumbnailImage.createGraphics();
g.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BILINEAR);
g.drawImage(img, 0, 0, newW, newH, null);
g.dispose();
/*
* Write
*/
log.emitLog("Writing Output");
try {
ImageIO.write(thumbnailImage, "PNG" , thumbnailOutputStream);
} catch (Exception e) {
log.emitLog("Failed to write image to out stream");
throw new StorletException("Failed to write image to out stream " + e.getMessage());
} finally {
try {
thumbnailOutputStream.close();
} catch (IOException e) {
}
}
/*
* Write
*/
log.emitLog("Writing Output");
try {
ImageIO.write(thumbnailImage, "PNG" , thumbnailOutputStream);
} catch (Exception e) {
log.emitLog("Failed to write image to out stream");
throw new StorletException("Failed to write image to out stream " + e.getMessage());
} finally {
try {
thumbnailOutputStream.close();
} catch (IOException e) {
}
}
try {
thumbnailOutputStream.close();
} catch (IOException e) {
}
try {
thumbnailOutputStream.close();
} catch (IOException e) {
}
log.emitLog("Done");
log.emitLog("Done");
}
}
}
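Since the storlet writes a PNG thumbnail (an eighth of the original width and height) to its output stream, a GET through it returns the scaled image rather than the stored jpg. A minimal sketch; auth values, container and object name are illustrative::

    from swiftclient import client as c

    url, token = c.get_auth('http://127.0.0.1:5000/v2.0', 'service:swift', 'passw0rd',
                            os_options={'tenant_name': 'service'}, auth_version='2.0')
    headers, png = c.get_object(url, token, 'myobjects', 'sample.jpg',
                                headers={'X-Run-Storlet': 'thumbnail-1.0.jar'})
    # Save the transformed payload locally.
    with open('/tmp/sample_thumb.png', 'wb') as f:
        f.write(png)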

build.xml

@ -14,111 +14,111 @@
-->
<project>
<!-- Bring build Dependencies-->
<target name="dependencies" >
<mkdir dir="Engine/dependencies" />
<get src="http://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/json-simple/json_simple-1.1.jar"
dest="Engine/dependencies/json_simple-1.1.jar"
verbose="true"
usetimestamp="true"/>
<get src="http://central.maven.org/maven2/ch/qos/logback/logback-classic/1.1.2/logback-classic-1.1.2.jar"
dest="Engine/dependencies/logback-classic-1.1.2.jar"
verbose="true"
usetimestamp="true"/>
<get src="http://central.maven.org/maven2/ch/qos/logback/logback-core/1.1.2/logback-core-1.1.2.jar"
dest="Engine/dependencies/logback-core-1.1.2.jar"
verbose="true"
usetimestamp="true"/>
<get src="http://central.maven.org/maven2/org/slf4j/slf4j-api/1.7.7/slf4j-api-1.7.7.jar"
dest="Engine/dependencies/slf4j-api-1.7.7.jar"
verbose="true"
usetimestamp="true"/>
<copy file="install/storlets/roles/docker_storlet_engine_image/files/logback.xml"
tofile="Engine/dependencies/logback.xml"/>
</target>
<!-- Bring build Dependencies-->
<target name="dependencies" >
<mkdir dir="Engine/dependencies" />
<get src="http://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/json-simple/json_simple-1.1.jar"
dest="Engine/dependencies/json_simple-1.1.jar"
verbose="true"
usetimestamp="true"/>
<get src="http://central.maven.org/maven2/ch/qos/logback/logback-classic/1.1.2/logback-classic-1.1.2.jar"
dest="Engine/dependencies/logback-classic-1.1.2.jar"
verbose="true"
usetimestamp="true"/>
<get src="http://central.maven.org/maven2/ch/qos/logback/logback-core/1.1.2/logback-core-1.1.2.jar"
dest="Engine/dependencies/logback-core-1.1.2.jar"
verbose="true"
usetimestamp="true"/>
<get src="http://central.maven.org/maven2/org/slf4j/slf4j-api/1.7.7/slf4j-api-1.7.7.jar"
dest="Engine/dependencies/slf4j-api-1.7.7.jar"
verbose="true"
usetimestamp="true"/>
<copy file="install/storlets/roles/docker_storlet_engine_image/files/logback.xml"
tofile="Engine/dependencies/logback.xml"/>
</target>
<!-- Storlets Engine build -->
<macrodef name="iterate_engine">
<attribute name="target" />
<sequential>
<subant target="@{target}">
<fileset dir="Engine/SBus/SBusTransportLayer" includes="build.xml" />
<fileset dir="Engine/SBus/SBusJavaFacade" includes="build.xml" />
<fileset dir="Engine/SBus/SBusPythonFacade" includes="build.xml" />
<fileset dir="Engine/SCommon" includes="build.xml" />
<fileset dir="Engine/SDaemon" includes="build.xml" />
<fileset dir="Engine/SMScripts" includes="build.xml" />
<fileset dir="Engine/agent" includes="build.xml" />
<fileset dir="Engine/swift" includes="build.xml" />
</subant>
</sequential>
</macrodef>
<!-- Storlets Engine build -->
<macrodef name="iterate_engine">
<attribute name="target" />
<sequential>
<subant target="@{target}">
<fileset dir="Engine/SBus/SBusTransportLayer" includes="build.xml" />
<fileset dir="Engine/SBus/SBusJavaFacade" includes="build.xml" />
<fileset dir="Engine/SBus/SBusPythonFacade" includes="build.xml" />
<fileset dir="Engine/SCommon" includes="build.xml" />
<fileset dir="Engine/SDaemon" includes="build.xml" />
<fileset dir="Engine/SMScripts" includes="build.xml" />
<fileset dir="Engine/agent" includes="build.xml" />
<fileset dir="Engine/swift" includes="build.xml" />
</subant>
</sequential>
</macrodef>
<target name="build_engine" depends="dependencies">
<iterate_engine target="build" />
</target>
<target name="build_engine" depends="dependencies">
<iterate_engine target="build" />
</target>
<target name="clean_engine">
<iterate_engine target="clean" />
<target name="clean_engine">
<iterate_engine target="clean" />
<delete dir="Engine/dependencies" />
</target>
<!-- Storlets Engine build /-->
</target>
<!-- Storlets Engine build /-->
<!-- Storlets Samples build -->
<macrodef name="iterate_storlets">
<attribute name="target" />
<sequential>
<subant target="@{target}">
<fileset dir="StorletSamples/TestStorlet" includes="build.xml" />
<fileset dir="StorletSamples/ExecDepStorlet" includes="build.xml" />
<fileset dir="StorletSamples/IdentityStorlet" includes="build.xml" />
<fileset dir="StorletSamples/PartitionsIdentityStorlet" includes="build.xml" />
<fileset dir="StorletSamples/TestMetadataStorlet" includes="build.xml" />
<fileset dir="StorletSamples/HalfStorlet" includes="build.xml" />
<fileset dir="StorletSamples/CompressStorlet" includes="build.xml" />
<fileset dir="StorletSamples/ThumbnailStorlet" includes="build.xml" />
</subant>
</sequential>
</macrodef>
<!-- Storlets Samples build -->
<macrodef name="iterate_storlets">
<attribute name="target" />
<sequential>
<subant target="@{target}">
<fileset dir="StorletSamples/TestStorlet" includes="build.xml" />
<fileset dir="StorletSamples/ExecDepStorlet" includes="build.xml" />
<fileset dir="StorletSamples/IdentityStorlet" includes="build.xml" />
<fileset dir="StorletSamples/PartitionsIdentityStorlet" includes="build.xml" />
<fileset dir="StorletSamples/TestMetadataStorlet" includes="build.xml" />
<fileset dir="StorletSamples/HalfStorlet" includes="build.xml" />
<fileset dir="StorletSamples/CompressStorlet" includes="build.xml" />
<fileset dir="StorletSamples/ThumbnailStorlet" includes="build.xml" />
</subant>
</sequential>
</macrodef>
<target name="build_storlets">
<iterate_storlets target="build" />
</target>
<target name="build_storlets">
<iterate_storlets target="build" />
</target>
<target name="clean_storlets">
<iterate_storlets target="clean" />
</target>
<target name="clean_storlets">
<iterate_storlets target="clean" />
</target>
<target name="build" depends="build_engine, build_storlets" />
<target name="clean" depends="clean_engine, clean_storlets">
<delete dir="bin" />
<target name="build" depends="build_engine, build_storlets" />
<target name="clean" depends="clean_engine, clean_storlets">
<delete dir="bin" />
</target>
<!-- Storlets Samples build /-->
<!-- Storlets Samples build /-->
<!-- Deploy -->
<!-- To execute the below tasks you must have:
(1) ansible installed
(2) hosts file configured
-->
<target name="deploy_host_engine" depends="build_engine">
<exec executable="ansible-playbook" dir="install/storlets" failonerror="true">
<arg value="-s"/>
<arg value="-i"/>
<arg value="storlets_dynamic_inventory.py"/>
<arg value="host_storlet_engine.yml"/>
</exec>
</target>
<!-- Deploy -->
<!-- To execute the below tasks you must have:
(1) ansible installed
(2) hosts file configured
-->
<target name="deploy_host_engine" depends="build_engine">
<exec executable="ansible-playbook" dir="install/storlets" failonerror="true">
<arg value="-s"/>
<arg value="-i"/>
<arg value="storlets_dynamic_inventory.py"/>
<arg value="host_storlet_engine.yml"/>
</exec>
</target>
<target name="deploy_container_engine" depends="build_engine">
<exec executable="ansible-playbook" dir="install/storlets" failonerror="true">
<arg value="-s"/>
<arg value="-i"/>
<arg value="storlets_dynamic_inventory.py"/>
<arg value="container_storlet_engine.yml"/>
</exec>
</target>
<target name="deploy_container_engine" depends="build_engine">
<exec executable="ansible-playbook" dir="install/storlets" failonerror="true">
<arg value="-s"/>
<arg value="-i"/>
<arg value="storlets_dynamic_inventory.py"/>
<arg value="container_storlet_engine.yml"/>
</exec>
</target>
<target name="deploy" depends="deploy_host_engine, deploy_container_engine"/>
<target name="deploy" depends="deploy_host_engine, deploy_container_engine"/>
</project>


@ -46,7 +46,7 @@ The storlet may depend on other existing libraries, which must be uploaded to th
When uploading a storlet,
the X-Object-Meta-Storlet-Dependency header requires a value that is a comma separated list of dependencies.
The main_class_name parameter for the X-Object-Meta-Storlet-Main header specifies the class in which the invoke
method of the storlet is defined.
The X-Object-Meta-Storlet-Language header specifies the language in which the storlet is run.
At present, only 'Java' is supported.
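To make the header usage concrete, here is a minimal upload sketch with python-swiftclient; the container name ('storlet'), the main class path and the dependency name are illustrative assumptions and must match the storlet actually being deployed and the engine's configuration::

    from swiftclient import client as c

    url, token = c.get_auth('http://127.0.0.1:5000/v2.0', 'service:swift', 'passw0rd',
                            os_options={'tenant_name': 'service'}, auth_version='2.0')
    headers = {'X-Object-Meta-Storlet-Language': 'Java',
               'X-Object-Meta-Storlet-Main': 'org.openstack.storlet.IdentityStorlet',
               'X-Object-Meta-Storlet-Dependency': 'get42'}
    # Upload the storlet jar itself as the object content, annotated with the headers above.
    with open('identitystorlet-1.0.jar', 'rb') as f:
        c.put_object(url, token, 'storlet', 'identitystorlet-1.0.jar', f, headers=headers)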
@ -100,19 +100,19 @@ Once the storlet and its dependencies are deployed the storlet is ready for invo
Storlets can be invoked in 3 ways:
#. Invocation upon object download.
In this case the user gets a transformation of the object residing in the store (as opposed to the actual object).
One use case for GET is anonymization, where the user might not have access to certain data unless it is
being anonymized by some storlet.
#. Invocation upon object upload.
In this case the data kept in the object store is a transformation of the object uploaded by the user
(as opposed to the original data or metadata).
A typical use case is metadata enrichment, where a Storlet extracts format specific metadata from the uploaded data
and adds it as Swift metadata.
#. Invocation upon object copy.
In this case the storlet acts on data that is in the object store, generating a new object. A typical use case is
thumbnail extraction from an existing jpg.
Invocation involves adding an extra header ('X-Run-Storlet') to the Swift original PUT/GET/COPY requests.
Additional details and examples can be found in <https://github.com/openstack/storlets/blob/master/doc/source/invoking_storlets.rst>.
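Of the three flavors, only GET and PUT are shown in full in the invocation scripts that appear later in this document. As a hedged sketch, the copy case can be expressed as a PUT carrying the X-Copy-From header; the object names and the thumbnail storlet used here are illustrative::

    from swiftclient import client as c

    url, token = c.get_auth('http://127.0.0.1:5000/v2.0', 'service:swift', 'passw0rd',
                            os_options={'tenant_name': 'service'}, auth_version='2.0')
    # Server-side copy: the storlet transforms /myobjects/sample.jpg and the result
    # is stored as myobjects/sample_thumb.png.
    c.put_object(url, token, 'myobjects', 'sample_thumb.png', contents='',
                 headers={'X-Copy-From': '/myobjects/sample.jpg',
                          'X-Run-Storlet': 'thumbnail-1.0.jar'})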


@ -20,82 +20,82 @@ The code below shows the invocation. Some notes:
get42 binary dependency is to be called. Note the difference in the response
headers where one shows the execution result and the other does not.
#. Note the X-Run-Storlet header being added to the call.
#. Note the X-Storlet-Generate-Log header that causes a log file to be created.
The execution results below show the log retrieval.
::
from swiftclient import client as c

def get_processed_object(url, token, storlet_name, container_name, object_name, invoke_get42 = False):
    headers = {'X-Run-Storlet': storlet_name,
               'X-Storlet-Generate-Log' : 'True'}
    if (invoke_get42 == True):
        querystring = 'execute=true'
    else:
        querystring = None
    response_headers, object_content = c.get_object(url,
                                                    token,
                                                    container_name,
                                                    object_name,
                                                    query_string = querystring,
                                                    response_dict=dict(),
                                                    headers = headers)
    print response_headers
    print object_content

AUTH_IP = '127.0.0.1'
AUTH_PORT = '5000'
ACCOUNT = 'service'
USER_NAME = 'swift'
PASSWORD = 'passw0rd'
os_options = {'tenant_name': ACCOUNT}
url, token = c.get_auth("http://" + AUTH_IP + ":" + AUTH_PORT + "/v2.0", ACCOUNT +":"+USER_NAME, PASSWORD, os_options = os_options, auth_version="2.0")

print 'Identity Storlet invocation without calling get42'
get_processed_object(url, token, 'identitystorlet-1.0.jar', 'myobjects', 'source.txt')
print 'Identity Storlet invocation instructing to call get42'
get_processed_object(url, token, 'identitystorlet-1.0.jar', 'myobjects', 'source.txt', True)
Here is the result of running the above python script:
::
eranr@lnx-ccs8:/tmp$ python get_object_with_storlet.py
Identity Storlet invocation without calling get42
{
'x-object-meta-x-object-meta-testkey': 'tester',
'transfer-encoding': 'chunked',
'accept-ranges': 'bytes',
'x-object-meta-testkey': 'tester',
'last-modified': 'Tue, 30 Sep 2014 22:07:42 GMT',
'etag': '8ca2a24dbd9779d462c66866c0fb90c3',
'x-timestamp': '1412114861.90504',
'x-trans-id': 'tx464a488a618e44b5b763d-00542baa25',
'date': 'Wed, 01 Oct 2014 07:15:50 GMT',
'x-object-meta-type': 'SBUS_FD_INPUT_OBJECT',
'content-type': 'application/octet-stream'
}
Some content to copy
Identity Storlet invocation instructing to call get42
{
'x-object-meta-execution result': '42',
'x-object-meta-x-object-meta-testkey': 'tester',
'transfer-encoding': 'chunked',
'accept-ranges': 'bytes',
'x-object-meta-testkey': 'tester',
'last-modified': 'Tue, 30 Sep 2014 22:07:42 GMT',
'etag': '8ca2a24dbd9779d462c66866c0fb90c3',
'x-timestamp': '1412114861.90504',
'x-trans-id': 'tx12a4f2a168804dcabf8fc-00542baa26',
'date': 'Wed, 01 Oct 2014 07:15:50 GMT',
'x-object-meta-type': 'SBUS_FD_INPUT_OBJECT',
'content-type': 'application/octet-stream'
}
Some content to copy
We now show a download of the log file generated per the X-Storlet-Generate-Log header.
Again, we use the swift client assuming we have the appropriate environment variables in place.
@ -104,17 +104,17 @@ Note that the log reflects the two invocations done above.
::
eranr@lnx-ccs8:/tmp$ swift download storletlog identitystorlet.log
identitystorlet.log [headers 0.243s, total 0.243s, 0.001 MB/s]
eranr@lnx-ccs8:/tmp$ cat identitystorlet.log
About to invoke storlet
IdentityStorlet Invoked
Storlet invocation done
About to invoke storlet
IdentityStorlet Invoked
Exec = /home/swift/identitystorlet/get42
Exit code = 42
Storlet invocation done
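The same log object can also be fetched programmatically; a minimal python-swiftclient equivalent of the swift CLI call above, using the same sample auth values as the earlier scripts::

    from swiftclient import client as c

    url, token = c.get_auth('http://127.0.0.1:5000/v2.0', 'service:swift', 'passw0rd',
                            os_options={'tenant_name': 'service'}, auth_version='2.0')
    # The log object for the identity storlet lives in the storletlog container.
    headers, log_body = c.get_object(url, token, 'storletlog', 'identitystorlet.log')
    print(log_body)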
Invocation on PUT
=================
@ -127,84 +127,84 @@ the code below shows the invocation. Some notes:
::
import random
import string

from swiftclient import client as c

def put_processed_object(url, token, storlet_name, container_name, object_name, file_name_to_upload, invoke_get42 = False):
    headers = {'X-Run-Storlet': storlet_name,
               'X-Storlet-Generate-Log' : 'True'}
    if (invoke_get42 == True):
        querystring = 'execute=true'
    else:
        querystring = None
    fileobj = open(file_name_to_upload,'r')
    random_md = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(32))
    headers = {'X-Run-Storlet': 'identitystorlet-1.0.jar', 'X-Object-Meta-Testkey' : random_md}
    c.put_object(url,
                 token,
                 container_name,
                 object_name,
                 fileobj,
                 headers = headers,
                 query_string = querystring,
                 response_dict=dict())
    resp_headers, saved_content = c.get_object(
        url,
        token,
        container_name,
        object_name,
        response_dict=dict())
    print resp_headers

AUTH_IP = '127.0.0.1'
AUTH_PORT = '5000'
ACCOUNT = 'service'
USER_NAME = 'swift'
PASSWORD = 'passw0rd'
os_options = {'tenant_name': ACCOUNT}
url, token = c.get_auth("http://" + AUTH_IP + ":" + AUTH_PORT + "/v2.0", ACCOUNT +":"+USER_NAME, PASSWORD, os_options = os_options, auth_version="2.0")

print 'Identity Storlet invocation without calling get42'
put_processed_object(url, token, 'identitystorlet-1.0.jar', 'myobjects', 'source.txt', '/tmp/source.txt')
print 'Identity Storlet invocation instructing to call get42'
put_processed_object(url, token, 'identitystorlet-1.0.jar', 'myobjects', 'source.txt', '/tmp/source.txt' , True)
Here is the result of running the above python script:
::
eranr@lnx-ccs8:/tmp$ python put_object_with_storlet.py
Identity Storlet invocation without calling get42
{
'content-length': '1024',
'x-object-meta-x-object-meta-testkey': '1185FZ5FPQ1WXS9IDT4TZZB6GYAQQ0WL',
'accept-ranges': 'bytes',
'x-object-meta-testkey': '1185FZ5FPQ1WXS9IDT4TZZB6GYAQQ0WL',
'last-modified': 'Wed, 01 Oct 2014 07:48:56 GMT',
'etag': '7575c5b098f45ccabce1c3f7fc906eb9',
'x-timestamp': '1412149735.87168',
'x-trans-id': 'tx9a27ba91bee34a8ca9f0c-00542bb1e7',
'date': 'Wed, 01 Oct 2014 07:48:55 GMT',
'x-object-meta-type': 'SBUS_FD_INPUT_OBJECT',
'content-type': 'text/plain'
}
Identity Storlet invocation instructing to call get42
{
'x-object-meta-execution result': '42',
'content-length': '1024',
'x-object-meta-x-object-meta-testkey': '54YA1EDTTODMBUJOYCHEGSOQQPV0180L', // This looks like a bug
'accept-ranges': 'bytes',
'x-object-meta-testkey': '54YA1EDTTODMBUJOYCHEGSOQQPV0180L',
'last-modified': 'Wed, 01 Oct 2014 07:48:56 GMT',
'etag': '7575c5b098f45ccabce1c3f7fc906eb9',
'x-timestamp': '1412149735.97100',
'x-trans-id': 'txde8619a966c14b0c99d97-00542bb1e8',
'date': 'Wed, 01 Oct 2014 07:48:56 GMT',
'x-object-meta-type': 'SBUS_FD_INPUT_OBJECT',
'content-type': 'text/plain'
}


@ -12,7 +12,7 @@ following:
of the init process. Specifically, do not install daemons that require special
initializations on 'OS bring up'.
The idea is that a user supplied docker image would contain dependencies
required by storlets in the form of libraries.
============
@@ -33,11 +33,11 @@ Below are the steps of this flow:
the installed software stack, and upload it back to the docker_images container.
#. Once uploaded, the account manager must notify the Swift Storlet engine manager
of the update. The storlets manager would take care of testing and deploying
it to all Swift nodes. Again, <https://github.com/openstack/storlets/blob/master/doc/source/storlets_management.rst>
describes the provided tool the Storlet manager can use for the actual deployment.
The sections below describe in detail the steps taken by the account manager.
Downloading the Docker Image
============================
Downloading the Docker image involves a simple retrieval of a Swift object. To
@@ -52,7 +52,7 @@ facts:
The image will come in a .tar format.
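For illustration, the same retrieval can be done with python-swiftclient; below is a minimal sketch, assuming the image object is the ubuntu_14.04_jre8_storlets.tar used later in this guide:
::

    import swiftclient.client as c

    # url and token are obtained via c.get_auth() as in the other examples.
    headers, image_tar = c.get_object(url, token, 'docker_images',
                                      'ubuntu_14.04_jre8_storlets.tar')
    with open('/tmp/ubuntu_14.04_jre8_storlets.tar', 'wb') as f:
        f.write(image_tar)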
Below is an example of downloading the image from the tenant's docker_images
container using the swift CLI. As with all examples using the Swift CLI, we are
using environment variables defining the tenant, user credentials and auth URI.
All these are required for the operation of any Swift CLI. Please change them
@@ -109,18 +109,18 @@ The below steps illustrate the tuning process:
root@lnx-ccs8:/home/eranr# docker load -i ubuntu_14.04_jre8_storlets.tar
root@lnx-ccs8:/home/eranr# docker images
REPOSITORY                                  TAG     IMAGE ID      CREATED     VIRTUAL SIZE
localhost:5001/ubuntu_14.04_jre8_storlets   latest  f6929e6abc60  3 days ago  563.6 MB
2. Use a Docker file that is based on the loaded image to make the necessary
changes to the image. Below is a Dockerfile for installing 'ffmpeg'. A few
notes are in order:
#. The first line "FROM" must carry the image name we have downloaded.
#. The maintainer needs to be a user that is allowed to do the actual actions
within the container. Please leave it as is.
#. The below example shows ffmpeg installation. For more options and
information on Dockerfiles, please refer to:
http://docs.docker.com/reference/builder/
#. One MUST refrain from using the Dockerfile ENTRYPOINT and CMD. Using those
will make the image unusable by the Storlet engine.
@@ -129,7 +129,7 @@ The below steps illustrate the tuning process:
root@lnx-ccs8:/home/eranr/dockerfile_example# cat Dockerfile
FROM 127.0.0.1:5001/ubuntu_14.04_jre8_storlets
MAINTAINER root
RUN ["apt-get", "update"]
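Rebuilding the tuned image and uploading it back to the tenant's docker_images container can also be scripted. Below is a minimal sketch that shells out to the docker CLI and uploads the resulting tar with python-swiftclient; the image tag and file names are illustrative only:
::

    import subprocess
    import swiftclient.client as c

    tag = '127.0.0.1:5001/ubuntu_14.04_jre8_storlets_ffmpeg'    # illustrative
    tar_path = '/tmp/ubuntu_14.04_jre8_storlets_ffmpeg.tar'     # illustrative

    # Build from the Dockerfile in the current directory and save it to a tar.
    subprocess.check_call(['docker', 'build', '-t', tag, '.'])
    subprocess.check_call(['docker', 'save', '-o', tar_path, tag])

    # Upload the tar back to the tenant's docker_images container.
    # url and token are obtained via c.get_auth() as in the other examples.
    with open(tar_path, 'rb') as f:
        c.put_object(url, token, 'docker_images',
                     'ubuntu_14.04_jre8_storlets_ffmpeg.tar', contents=f)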
@@ -39,11 +39,11 @@ The guide uses the following addresses:
::
management / keystone / docker repository: 192.168.56.200
proxy 192.168.56.210
object1 192.168.56.220
object2 192.168.56.230
object3 192.168.56.240
.. note::
The Ansible installation scripts used throughout the deployment assume that the user root
@@ -101,13 +101,13 @@ deployment example.
storlets_default_tenant_name: "test"
storlets_default_tenant_user_name: "tester"
storlets_default_tenant_user_password: "testing"
keystone_endpoint_host: 192.168.56.200
keystone_admin_token: ADMIN
swift_endpoint_host: 192.168.56.210
swift_endpoint_port: 80
swift_run_time_user: swift
swift_run_time_group: swift
lxc_device: /srv/node/sdb
This file is used for creating the cluster_config.json which is
used by the Ansible installation. We give a full description of
@@ -56,7 +56,7 @@ To configure Keystone you would also need to:
::
sudo apt-get install python-openstackclient
Initial Keystone Configuration
------------------------------
@@ -66,8 +66,8 @@ change the below command accordingly.
::
openstack --os-token ADMIN --os-url http://127.0.0.1:35357/v2.0/ service create identity
openstack --os-token ADMIN --os-url http://127.0.0.1:35357/v2.0/ endpoint create --publicurl http://127.0.0.1:5000/v2.0 --adminurl http://127.0.0.1:35357/v2.0 identity
openstack --os-token ADMIN --os-url http://127.0.0.1:35357/v2.0/ service create object-store
openstack --os-url http://127.0.0.1:35357/v2.0/ --os-token ADMIN endpoint create --publicurl 'http://127.0.0.1:8080/v1/AUTH_$(tenant_id)s' object-store
@@ -214,19 +214,19 @@ with the following content:
apt-get install openjdk-8-jre -y && \
apt-get clean
COPY logback-classic-1.1.2.jar /opt/storlets/
RUN ["chmod", "0744", "/opt/storlets/logback-classic-1.1.2.jar"]
COPY logback-core-1.1.2.jar /opt/storlets/
RUN ["chmod", "0744", "/opt/storlets/logback-core-1.1.2.jar"]
COPY logback.xml /opt/storlets/
RUN ["chmod", "0744", "/opt/storlets/logback.xml"]
COPY slf4j-api-1.7.7.jar /opt/storlets/
RUN ["chmod", "0744", "/opt/storlets/slf4j-api-1.7.7.jar"]
COPY json_simple-1.1.jar /opt/storlets/
RUN ["chmod", "0744", "/opt/storlets/json_simple-1.1.jar"]
Build the image
@@ -417,7 +417,7 @@ In a SAIO environment these would be:
/etc/swift/object-server/1.conf through /etc/swift/object-server/4.conf
otherwise the file is typically /etc/swift/object-server.conf
1. Add the storlet_handler to the object server pipeline just before the slo object-server as shown below:
::
@@ -24,7 +24,7 @@ The main build task in build.xml is dependent on two other build tasks:
* storlet_middleware
* storlet_gateway
#. The SBus code. This is the communication module between the gateway and the Docker container. It has a transport layer written in "C" with
'bindings' to both Java and Python.
#. The Python written storlet_factory_daemon, which is packaged for installation in a Docker image
#. The Java SDaemon code, which is the daemon code that loads the storlets in run time. This code is compiled to a .jar that is later installed
@@ -39,10 +39,10 @@ Two additional tasks of interest in our build.xml are the deploy_host_engine and
These tasks are based on the Ansible installation scripts and do the following:
#. deploy_host_engine would get all the code that is relevant to the host side
(python middleware and SBus) and deploy it on the hosts as described in the
cluster_config.json file
#. deploy_container_engine would create an updated image of the tenant defined
in the cluster_config.json and distribute it to all nodes as defined in
the configuration.
-----------------
@@ -44,7 +44,7 @@ The storlet is uploaded to the container named 'storlet', so '/storlet' appears
The storlet may depend on other existing libraries, which must be uploaded to the dependency container.
When uploading a storlet,
the X-Object-Meta-Storlet-Dependency header requires a value that is a comma separated list of dependencies.
The main_class_name parameter for the X-Object-Meta-Storlet-Main header specifies the class in which the invoke
method of the storlet is defined.
The X-Object-Meta-Storlet-Language header specifies the language in which the storlet is run.
At present, only 'Java' is supported.
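For illustration, here is a python-swiftclient sketch of a storlet upload carrying these headers; the jar, class and dependency names are placeholders, and additional deployment headers may be required depending on the engine version:
::

    import swiftclient.client as c

    headers = {
        'X-Object-Meta-Storlet-Language': 'Java',
        'X-Object-Meta-Storlet-Main': 'org.example.MyStorlet',  # placeholder class
        'X-Object-Meta-Storlet-Dependency': 'mylib-1.0.jar',    # comma separated list
    }
    # url and token are obtained via c.get_auth() as in the other examples.
    with open('mystorlet-1.0.jar', 'rb') as f:
        c.put_object(url, token, 'storlet', 'mystorlet-1.0.jar',
                     contents=f, content_type='application/octet-stream',
                     headers=headers)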
@@ -89,7 +89,7 @@ The content-type of the request should be set to 'application/octet-stream'.
[PUT] /v1/{account}/dependency/{dependency_object_name}
::
'X-Object-Meta-Storlet-Dependency-Version': '1'
'X-Object-Meta-Storlet-Dependency-Permissions' : '0755'
'X-Auth-Token': {authorization_token}
@@ -103,19 +103,19 @@ Once the storlet and its dependencies are deployed the storlet is ready for invo
Storlets can be invoked in 3 ways:
#. Invocation upon GET.
In this case the user gets a transformation of the object residing in the store (as opposed to the actual object).
One use case for GET is anonymization, where the user might not have access to certain data unless it is
being anonymized by some storlet.
#. Invocation upon PUT.
In this case the data kept in the object store is a transformation of the object uploaded by the user
(as opposed to the original data or metadata).
A typical use case is metadata enrichment, where a Storlet extracts format specific metadata from the uploaded data
and adds it as Swift metadata.
#. Invocation upon COPY.
In this case the storlet acts on data that is in the object store, generating a new object. A typical use case is
thumbnail extraction from an existing jpg.
Invocation involves adding an extra header ('X-Run-Storlet') to the Swift original PUT/GET/COPY requests.
Additional details and examples can be found in <https://github.com/openstack/storlets/blob/master/doc/source/invoking_storlets.rst>.
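As a sketch, a GET invocation with python-swiftclient could look as follows; it assumes a client version whose get_object accepts a headers argument, and it reuses the storlet and object names from the earlier example:
::

    import swiftclient.client as c

    # url and token are obtained via c.get_auth() as in the other examples.
    resp_headers, transformed = c.get_object(
        url, token, 'myobjects', 'source.txt',
        headers={'X-Run-Storlet': 'identitystorlet-1.0.jar'})
    print resp_headers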
@@ -162,7 +162,7 @@ Invoke a storlet upon object upload
::
[PUT] /v1/{account}/{container}/{object}
An additional header ('X-Run-Storlet') must be provided to inform the system to run a storlet.
::
@@ -14,7 +14,7 @@ Creating a Storlet enabled Tenant
The operation of creating a Storlet enabled tenant is made of the following steps:
#. Create a new tenant in Keystone, together with a tenant admin user.
#. Enable the corresponding Swift account for storlets, including the creation of the Storlet specific containers
whose default names are: storlet, dependency, storletlog and docker_images
#. Upload the default Storlets image to the account's docker_images container.
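The container creation mentioned above is plain Swift; below is a minimal python-swiftclient sketch using the default container names, assuming url and token belong to the new tenant's admin user (account enabling and image upload are handled by the management tools):
::

    import swiftclient.client as c

    for container in ('storlet', 'dependency', 'storletlog', 'docker_images'):
        c.put_container(url, token, container)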
@@ -33,7 +33,7 @@ were provided to the initial installation script as described in <https://github
Below is a sample invocation:
::
root@lnx-ccs8:/opt/ibm# ./add_new_tenant.py
./add_new_tenant.py <tenant_name> <user_name> <user_password>
@@ -94,12 +94,12 @@ Below is a sample invocation:
Deploying a Tenant Image
========================
Recall that in the Docker image build (described in <https://github.com/openstack/storlets/blob/master/doc/source/building_and_deploying_docker_images.rst>) the image was given a name
(specified after -t in the docker build command) and was uploaded as a .tar file to the tenant's docker_images Swift container.
When deploying an image, the Storlet's admin needs to provide the tenant name, the .tar object name and the image name.
Running the deployment task
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Following the example from the build image instructions, the image name is called service_tenant_image
and the object name that was uploaded is service_tenant_image.tar, and so we execute:
::
@@ -41,7 +41,7 @@ different ways:
rather than the uploaded data and metadata.
#. Invoke a storlet on object copy. This is a way to invoke a storlet over an existing
data object, where the storlet's output is kept in a newly created object. In a regular
Swift copy the newly created object is identical to the source object.
------------------
The Storlet Engine
@@ -51,7 +51,7 @@ and invoke it over Swift data objects in an isolated manner (inside a Docker con
In a nutshell, the engine intercepts invocation requests, routes the input data stream into
the storlet and receives back the storlet output stream. The engine is implemented as a Swift
middleware.
-----
Roles
-----
@@ -15,4 +15,4 @@ heavier in bandwidth.
This use case was presented in the Paris Openstack summit, and is featured as a super user
story that can be viewed in [1]_.
.. [1] http://superuser.openstack.org/articles/docker-meets-swift-a-broadcaster-s-experience
@@ -5,7 +5,7 @@ It is said that the primary use case for object stores is to serve as secondary
storage. With the increasing amount of data being gathered and analysed
(has someone said IoT?) much of this data will make it to secondary storage.
Being kept on secondary storage does not mean that the data does not
need to be queryable anymore: a recently identified trend may be searched
for in older data that was moved to secondary storage. Storlets allow an
efficient and simple querying of data that resides in Swift.
@@ -1,5 +1,5 @@
===================================
Storlet writing and deploying guide
===================================
Currently, storlets must be written in Java. Writing a storlet involves
implementing a single method interface and following some simple rules and best
@@ -29,7 +29,7 @@ The interface has a single method that looks like this:
Below is a class diagram illustrating the classes involved in the above API.
.. image:: images/java_prog_model.jpg
:height: 960px
:width: 1216 px
:scale: 50 %
@@ -213,7 +213,7 @@ variables or via command line parameters. To make the commands more readable we
use environment variables as shown below. The actual values are aligned with the
development environment installation instructions_
.. _instructions: engine_dev_installation.html
::
@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# Limitations under the License.
#---------------------------------------------------------------------------
ansible_ssh_user: <ANSIBLE_USER>
repo_root: <STORLETS_REPO_ROOT>
mgmt_group: [ "127.0.0.1" ]
@@ -33,4 +32,4 @@ swift_endpoint_host: 127.0.0.1
swift_endpoint_port: 80
swift_run_time_user: <SWIFT_RUNTIME_USER>
swift_run_time_group: <SWIFT_RUNTIME_GROUP>
lxc_device: /home/docker_device
@@ -27,14 +27,14 @@ RUN apt-get update && \
apt-get install openjdk-8-jre -y && \
apt-get clean
COPY logback-classic-1.1.2.jar /opt/storlets/
RUN ["chmod", "0744", "/opt/storlets/logback-classic-1.1.2.jar"]
COPY logback-core-1.1.2.jar /opt/storlets/
RUN ["chmod", "0744", "/opt/storlets/logback-core-1.1.2.jar"]
COPY slf4j-api-1.7.7.jar /opt/storlets/
RUN ["chmod", "0744", "/opt/storlets/slf4j-api-1.7.7.jar"]
COPY json_simple-1.1.jar /opt/storlets/
RUN ["chmod", "0744", "/opt/storlets/json_simple-1.1.jar"]
@@ -1,7 +1,7 @@
Installation process:
1. Decide on swift device: either loop or real. Once decided - set vars.yml-sample accordingly and copy to vars.yml
2. Prepare the loop device if this is the choice
3. Create the cluster_config file that describes the cluster based on a template.
4. Pull the Swift ansible installation script
5. Copy cluster_config to the proper location of the Swift ansible installation script
6. Invoke swift installation: cd to repo/provisioning, ansible-playbook tralala
@@ -7,7 +7,7 @@ set -eu
# This is a dev oriented Swift installation that
# uses Keystone and a single device for all rings.
# TODO: Move swift ansible scripts pull from here
# to the swift-install module
# The script takes a block device name as an optional parameter
# The device name can be either 'loop0' or any block device under /dev
@@ -12,7 +12,6 @@
# See the License for the specific language governing permissions and
# Limitations under the License.
#---------------------------------------------------------------------------
swift_install_reop_dir: <set dir!>
#swift_install_repo_url: https://github.com/Open-I-Beam/swift-install.git
swift_install_repo_url: https://github.com/eranr/storlets-swift-install.git