Sample adapter configuration file for the File/Hadoop CSV Input adapter.
<?xml version="1.0" encoding="utf-8"?>
<Adapter>
  <Name>file_csv_input</Name>
  <Description>An adapter that reads CSV data from files on the local disk, converts it to ESP data format, and publishes it to an ESP stream.</Description>
  <Log4jProperty>./log4j.properties</Log4jProperty>
  <Modules>
    <Module type="transporter">
      <InstanceName>FileInputTransporter</InstanceName>
      <Name>FileInputTransporter</Name>
      <Next>CsvStringToEspFormatter</Next>
      <BufferMaxSize>10240</BufferMaxSize>
      <Parameters>
        <FileInputTransporterParameters>
          <Dir>./data</Dir>
          <File>input.csv</File>
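          <!-- rowBased delivers the file contents to the formatter one row (line) at a time -->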
          <AccessMode>rowBased</AccessMode>
          <RemoveAfterProcess>false</RemoveAfterProcess>
          <ScanDepth>5</ScanDepth>
        </FileInputTransporterParameters>
      </Parameters>
    </Module>
<Module type="formatter">
<InstanceName>CsvStringToEspFormatter</InstanceName>
<Name>CsvStringToEspFormatter</Name>
<Next>MyInStream_Publisher</Next>
<Parallel>true</Parallel>
<Parameters>
<CsvStringToEspFormatterParameters>
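          <!-- true: each CSV row must begin with the target stream name and an opcode (for example, i for insert) -->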
          <ExpectStreamNameOpcode>true</ExpectStreamNameOpcode>
        </CsvStringToEspFormatterParameters>
      </Parameters>
    </Module>
<Module type="espconnector">
<InstanceName>MyInStream_Publisher</InstanceName>
<Name>EspPublisher</Name>
<Parameters>
<EspPublisherParameters>
<!--Uncomment the following 2 elements when you use 'start_adapter[.bat|.sh]' -->
<!--ProjectName>EspProject1</ProjectName-->
<!--StreamName>BaseInput</StreamName-->
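          <!-- A pool size of 1 publishes each record individually instead of pooling records into larger batches -->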
          <MaxPubPoolSize>1</MaxPubPoolSize>
          <UseTransactions>false</UseTransactions>
          <SafeOps>false</SafeOps>
          <SkipDels>false</SkipDels>
        </EspPublisherParameters>
      </Parameters>
      <BufferMaxSize>10240</BufferMaxSize>
    </Module>
  </Modules>
  <EspProjects>
    <EspProject>
      <Name>EspProject1</Name>
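      <!-- URI format: esp://<host>:<port>/<workspace-name>/<project-name> -->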
      <Uri>esp://localhost:19011/sample_workspace/file_csv_input</Uri>
      <Security>
        <User></User>
        <Password encrypted="false"></Password>
        <AuthType>user_password</AuthType>
      </Security>
    </EspProject>
  </EspProjects>
  <GlobalParameters></GlobalParameters>
</Adapter>
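
Because ExpectStreamNameOpcode is set to true, every line in ./data/input.csv must begin with the target stream name and an opcode before the column values. The following is a minimal sketch of such a file, assuming a hypothetical BaseInput schema of an integer key, a string symbol, and a float price; the column layout must match whatever schema BaseInput actually declares:

BaseInput,i,1,IBM,120.50
BaseInput,i,2,SAP,95.25
BaseInput,i,3,ORCL,33.10

To run the sample standalone with start_adapter[.bat|.sh], uncomment the ProjectName and StreamName elements as noted in the EspPublisherParameters section above; when the adapter is started by the ESP project itself, those values are supplied by the server.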